commit 5bb28d2ebb87b00261b4abc9d7ba2aec2eebcb3c
Author: old-tom <892955278@qq.com>
Date:   Sun Apr 9 10:52:35 2023 +0800

    first commit

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..a653c43
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,162 @@
+# ---> Python
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+*.pyc
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+.idea/
\ No newline at end of file
diff --git a/Pipfile b/Pipfile
new file mode 100644
index 0000000..f850403
--- /dev/null
+++ b/Pipfile
@@ -0,0 +1,12 @@
+[[source]]
+url = "https://pypi.org/simple"
+verify_ssl = true
+name = "pypi"
+
+[packages]
+sqlalchemy = "*"
+
+[dev-packages]
+
+[requires]
+python_version = "3.8"
diff --git a/Pipfile.lock b/Pipfile.lock
new file mode 100644
index 0000000..6e2f4f2
--- /dev/null
+++ b/Pipfile.lock
@@ -0,0 +1,142 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "b97e95210411fba046eb9e76c0be6c64ebad1ae4e815c86badba93f4590a3efe"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.8"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "greenlet": {
+ "hashes": [
+ "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a",
+ "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a",
+ "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43",
+ "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33",
+ "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8",
+ "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088",
+ "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca",
+ "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343",
+ "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645",
+ "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db",
+ "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df",
+ "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3",
+ "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86",
+ "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2",
+ "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a",
+ "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf",
+ "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7",
+ "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394",
+ "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40",
+ "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3",
+ "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6",
+ "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74",
+ "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0",
+ "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3",
+ "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91",
+ "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5",
+ "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9",
+ "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8",
+ "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b",
+ "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6",
+ "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb",
+ "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73",
+ "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b",
+ "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df",
+ "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9",
+ "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f",
+ "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0",
+ "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857",
+ "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a",
+ "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249",
+ "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30",
+ "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292",
+ "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b",
+ "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d",
+ "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b",
+ "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c",
+ "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca",
+ "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7",
+ "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75",
+ "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae",
+ "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b",
+ "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470",
+ "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564",
+ "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9",
+ "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099",
+ "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0",
+ "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5",
+ "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19",
+ "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1",
+ "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"
+ ],
+ "markers": "platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32')))))",
+ "version": "==2.0.2"
+ },
+ "sqlalchemy": {
+ "hashes": [
+ "sha256:07950fc82f844a2de67ddb4e535f29b65652b4d95e8b847823ce66a6d540a41d",
+ "sha256:0a865b5ec4ba24f57c33b633b728e43fde77b968911a6046443f581b25d29dd9",
+ "sha256:0b49f1f71d7a44329a43d3edd38cc5ee4c058dfef4487498393d16172007954b",
+ "sha256:13f984a190d249769a050634b248aef8991acc035e849d02b634ea006c028fa8",
+ "sha256:1b69666e25cc03c602d9d3d460e1281810109e6546739187044fc256c67941ef",
+ "sha256:1d06e119cf79a3d80ab069f064a07152eb9ba541d084bdaee728d8a6f03fd03d",
+ "sha256:246712af9fc761d6c13f4f065470982e175d902e77aa4218c9cb9fc9ff565a0c",
+ "sha256:34eb96c1de91d8f31e988302243357bef3f7785e1b728c7d4b98bd0c117dafeb",
+ "sha256:4c3020afb144572c7bfcba9d7cce57ad42bff6e6115dffcfe2d4ae6d444a214f",
+ "sha256:4f759eccb66e6d495fb622eb7f4ac146ae674d829942ec18b7f5a35ddf029597",
+ "sha256:68ed381bc340b4a3d373dbfec1a8b971f6350139590c4ca3cb722fdb50035777",
+ "sha256:6b72dccc5864ea95c93e0a9c4e397708917fb450f96737b4a8395d009f90b868",
+ "sha256:6e84ab63d25d8564d7a8c05dc080659931a459ee27f6ed1cf4c91f292d184038",
+ "sha256:734805708632e3965c2c40081f9a59263c29ffa27cba9b02d4d92dfd57ba869f",
+ "sha256:78612edf4ba50d407d0eb3a64e9ec76e6efc2b5d9a5c63415d53e540266a230a",
+ "sha256:7e472e9627882f2d75b87ff91c5a2bc45b31a226efc7cc0a054a94fffef85862",
+ "sha256:865392a50a721445156809c1a6d6ab6437be70c1c2599f591a8849ed95d3c693",
+ "sha256:8d118e233f416d713aac715e2c1101e17f91e696ff315fc9efbc75b70d11e740",
+ "sha256:8d3ece5960b3e821e43a4927cc851b6e84a431976d3ffe02aadb96519044807e",
+ "sha256:93c78d42c14aa9a9e0866eacd5b48df40a50d0e2790ee377af7910d224afddcf",
+ "sha256:95719215e3ec7337b9f57c3c2eda0e6a7619be194a5166c07c1e599f6afc20fa",
+ "sha256:9838bd247ee42eb74193d865e48dd62eb50e45e3fdceb0fdef3351133ee53dcf",
+ "sha256:aa5c270ece17c0c0e0a38f2530c16b20ea05d8b794e46c79171a86b93b758891",
+ "sha256:ac6a0311fb21a99855953f84c43fcff4bdca27a2ffcc4f4d806b26b54b5cddc9",
+ "sha256:ad5363a1c65fde7b7466769d4261126d07d872fc2e816487ae6cec93da604b6b",
+ "sha256:b3e5864eba71a3718236a120547e52c8da2ccb57cc96cecd0480106a0c799c92",
+ "sha256:bbda1da8d541904ba262825a833c9f619e93cb3fd1156be0a5e43cd54d588dcd",
+ "sha256:c6e27189ff9aebfb2c02fd252c629ea58657e7a5ff1a321b7fc9c2bf6dc0b5f3",
+ "sha256:c8239ce63a90007bce479adf5460d48c1adae4b933d8e39a4eafecfc084e503c",
+ "sha256:d209594e68bec103ad5243ecac1b40bf5770c9ebf482df7abf175748a34f4853",
+ "sha256:d5327f54a9c39e7871fc532639616f3777304364a0bb9b89d6033ad34ef6c5f8",
+ "sha256:db4bd1c4792da753f914ff0b688086b9a8fd78bb9bc5ae8b6d2e65f176b81eb9",
+ "sha256:e4780be0f19e5894c17f75fc8de2fe1ae233ab37827125239ceb593c6f6bd1e2",
+ "sha256:e4a019f723b6c1e6b3781be00fb9e0844bc6156f9951c836ff60787cc3938d76",
+ "sha256:e62c4e762d6fd2901692a093f208a6a6575b930e9458ad58c2a7f080dd6132da",
+ "sha256:e730603cae5747bc6d6dece98b45a57d647ed553c8d5ecef602697b1c1501cf2",
+ "sha256:ebc4eeb1737a5a9bdb0c24f4c982319fa6edd23cdee27180978c29cbb026f2bd",
+ "sha256:ee2946042cc7851842d7a086a92b9b7b494cbe8c3e7e4627e27bc912d3a7655e",
+ "sha256:f005245e1cb9b8ca53df73ee85e029ac43155e062405015e49ec6187a2e3fb44",
+ "sha256:f49c5d3c070a72ecb96df703966c9678dda0d4cb2e2736f88d15f5e1203b4159",
+ "sha256:f61ab84956dc628c8dfe9d105b6aec38afb96adae3e5e7da6085b583ff6ea789"
+ ],
+ "index": "pypi",
+ "version": "==2.0.9"
+ },
+ "typing-extensions": {
+ "hashes": [
+ "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb",
+ "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"
+ ],
+ "markers": "python_version >= '3.7'",
+ "version": "==4.5.0"
+ }
+ },
+ "develop": {}
+}
diff --git a/common/__init__.py b/common/__init__.py
new file mode 100644
index 0000000..1a6c84f
--- /dev/null
+++ b/common/__init__.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/4 16:00
+# @Author : old tom
+# @File : __init__.py
+# @Project : futool-tiny-datahub
+# @Desc : Common module
diff --git a/common/fudb/__init__.py b/common/fudb/__init__.py
new file mode 100644
index 0000000..781bb39
--- /dev/null
+++ b/common/fudb/__init__.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/4 16:00
+# @Author : old tom
+# @File : __init__.py
+# @Project : futool-db
+# @Desc : Database module
diff --git a/common/fudb/connectors/__init__.py b/common/fudb/connectors/__init__.py
new file mode 100644
index 0000000..a228065
--- /dev/null
+++ b/common/fudb/connectors/__init__.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/4 16:02
+# @Author : old tom
+# @File : __init__.py
+# @Project : futool-db
+# @Desc : Database connectors
diff --git a/common/fudb/connectors/connector_cache.py b/common/fudb/connectors/connector_cache.py
new file mode 100644
index 0000000..3a7897d
--- /dev/null
+++ b/common/fudb/connectors/connector_cache.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/9 0:02
+# @Author : old tom
+# @File : connector_cache.py
+# @Project : futool-tiny-datahub
+# @Desc :
+
+class ConnectorCacheManage(object):
+ """
+    Connector cache manager.
+    TODO: a cached value (V) may be reclaimed while its key (K) remains; a polling mechanism is needed to clean up such entries.
+    """
+    # Cache container
+ CONNECTOR_CACHE = {}
+
+ def exist(self, connector_id):
+        return connector_id in self.CONNECTOR_CACHE
+
+ def get(self, connector_id):
+ return self.CONNECTOR_CACHE[connector_id]
+
+ def set(self, connector_id, connector):
+ self.CONNECTOR_CACHE[connector_id] = connector
+
+
+# Module-level instance: importing this module yields a shared singleton
+connector_cache = ConnectorCacheManage()
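+
+
+if __name__ == '__main__':
+    # Minimal usage sketch; the key and value below are illustrative only.
+    # Every importer shares the same module-level connector_cache instance.
+    connector_cache.set('demo-connector-id', object())
+    print(connector_cache.exist('demo-connector-id'))  # True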
diff --git a/common/fudb/connectors/connector_factory.py b/common/fudb/connectors/connector_factory.py
new file mode 100644
index 0000000..e0368b4
--- /dev/null
+++ b/common/fudb/connectors/connector_factory.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/4 18:33
+# @Author : old tom
+# @File : connector_factory.py
+# @Project : futool-db
+# @Desc : Connector factory
+import hashlib
+
+from common.fudb.connectors.dialect.dialect_connector import OracleConnector, PostgresqlConnector
+from datahub.datasource.constant import ds_conf_param
+from urllib.parse import quote_plus as urlquote
+from common.fudb.connectors.connector_cache import connector_cache
+
+
+class ConnFactory(object):
+ """
+    Database connector factory
+ """
+ CONNECTION_CONTAINER = {
+ 'oracle': OracleConnector,
+ 'postgresql': PostgresqlConnector
+ }
+
+ def __init__(self, conf: ds_conf_param):
+ self.db_type = conf.db_type
+        # Generate the connector ID
+ self.connector_id = self._gen_connector_id(conf.db_type, conf.user, conf.password, conf.host, conf.port,
+ conf.database)
+        # Try to reuse a cached connector first
+ if connector_cache.exist(self.connector_id):
+ self.connector = connector_cache.get(self.connector_id)
+ else:
+            # urlquote escapes special characters in the password, e.g. @
+ self.connector = self.CONNECTION_CONTAINER[self.db_type](conf.user, urlquote(conf.password), conf.host,
+ conf.port, conf.database)
+ connector_cache.set(self.connector_id, self.connector)
+
+ def get_conn(self):
+ """
+        Get a connection.
+ :return:
+ """
+ return self.connector.get_conn()
+
+ @staticmethod
+ def _gen_connector_id(db_type, user, password, host, port, database):
+        # The same input always yields the same MD5 digest
+ md5 = hashlib.md5()
+ md5.update(
+            (db_type + user + password + host + str(port) + database).encode(encoding='utf-8'))
+ return md5.hexdigest()
diff --git a/common/fudb/connectors/dialect/__init__.py b/common/fudb/connectors/dialect/__init__.py
new file mode 100644
index 0000000..c5abc5d
--- /dev/null
+++ b/common/fudb/connectors/dialect/__init__.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/4 18:54
+# @Author : old tom
+# @File : __init__.py
+# @Project : futool-db
+# @Desc :
diff --git a/common/fudb/connectors/dialect/abs_connector.py b/common/fudb/connectors/dialect/abs_connector.py
new file mode 100644
index 0000000..d5a9744
--- /dev/null
+++ b/common/fudb/connectors/dialect/abs_connector.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/4 16:04
+# @Author : old tom
+# @File : abs_connector.py
+# @Project : futool-db
+# @Desc : Abstraction layer
+
+import abc
+from sqlalchemy import create_engine
+
+
+class CommonConnector(metaclass=abc.ABCMeta):
+
+ def __init__(self, db_conf: str):
+        # Initialize the SQLAlchemy engine
+ self.engine = create_engine(db_conf, pool_size=15, pool_recycle=3600)
+
+ @abc.abstractmethod
+ def get_conn(self):
+ """
+        Get a connection.
+ :return:
+ """
+ pass
diff --git a/common/fudb/connectors/dialect/dialect_connector.py b/common/fudb/connectors/dialect/dialect_connector.py
new file mode 100644
index 0000000..e7bc957
--- /dev/null
+++ b/common/fudb/connectors/dialect/dialect_connector.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/4 18:55
+# @Author : old tom
+# @File : dialect_connector.py
+# @Project : futool-db
+# @Desc :
+from common.fudb.connectors.dialect.abs_connector import CommonConnector
+
+
+class OracleConnector(CommonConnector):
+ """
+    Oracle connector; currently only the service_name style is supported, SID support is pending.
+ """
+
+ DSN = 'oracle+cx_oracle://{0}:{1}@{2}:{3}/?service_name={4}'
+
+ def __init__(self, user, password, host, port=1521, server_name='orcl'):
+ super().__init__(self.DSN.format(user, password, host, port, server_name))
+
+ def get_conn(self):
+ return self.engine.connect()
+
+
+class PostgresqlConnector(CommonConnector):
+ """
+    PostgreSQL connector.
+ """
+ PG_DIALECT = 'postgresql+psycopg2://{0}:{1}@{2}:{3}/{4}'
+
+ def __init__(self, user, password, host, port=5432, database='postgres'):
+ super().__init__(self.PG_DIALECT.format(user, password, host, port, database))
+
+ def get_conn(self):
+ return self.engine.connect()
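+
+
+if __name__ == '__main__':
+    # Minimal usage sketch; the credentials are placeholders and assume the
+    # psycopg2 driver is installed. create_engine() is lazy, so no connection
+    # is opened until get_conn() is called.
+    pg = PostgresqlConnector('postgres', 'secret', 'localhost')
+    print(pg.engine.url)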
diff --git a/common/fudb/dbapis/__init__.py b/common/fudb/dbapis/__init__.py
new file mode 100644
index 0000000..30f1242
--- /dev/null
+++ b/common/fudb/dbapis/__init__.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/4 16:03
+# @Author : old tom
+# @File : __init__.py
+# @Project : futool-db
+# @Desc : Database operation APIs
diff --git a/common/fudb/dbapis/fu_collection.py b/common/fudb/dbapis/fu_collection.py
new file mode 100644
index 0000000..af3ad4b
--- /dev/null
+++ b/common/fudb/dbapis/fu_collection.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/5 11:26
+# @Author : old tom
+# @File : fu_collection.py
+# @Project : futool-db
+# @Desc : Collection utilities
+
+def split_coll(data: list, part_size=5):
+    """
+    Split a collection into chunks of at most part_size elements.
+    :param data: list to split
+    :param part_size: maximum chunk size
+    :return: list of chunks
+    """
+ rt = []
+ if len(data) <= part_size:
+ rt.append(data)
+ else:
+ rt.append(data[0:part_size])
+ for j, d in enumerate(data):
+ if j > 0 and j % part_size == 0:
+ rt.append(data[j:j + part_size])
+ return rt
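+
+
+if __name__ == '__main__':
+    # Quick illustration of the chunking behaviour with made-up data.
+    print(split_coll(list(range(12)), part_size=5))
+    # [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9], [10, 11]]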
diff --git a/common/fudb/dbapis/fu_dao.py b/common/fudb/dbapis/fu_dao.py
new file mode 100644
index 0000000..34d762a
--- /dev/null
+++ b/common/fudb/dbapis/fu_dao.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/8 10:12
+# @Author : old tom
+# @File : fu_dao.py
+# @Project : futool-tiny-datahub
+# @Desc : Generic DAO layer
+
+from common.fudb.connectors.connector_factory import ConnFactory
+from common.fudb.dbapis.fu_db_api import select_all, select_one, batch_insert, count, execute_update
+
+
+class BaseDao(object):
+ def __init__(self, connector: ConnFactory):
+ self.connector = connector
+
+ def query_all(self, sql):
+ return select_all(self.connector.get_conn(), sql)
+
+ def query_one(self, sql):
+ return select_one(self.connector.get_conn(), sql)
+
+ def count(self, table_name):
+ return count(self.connector.get_conn(), table_name)
+
+ def execute_update(self, sql):
+ return execute_update(self.connector.get_conn(), sql)
+
+ def batch_insert(self, db_type, sql_tpl, data, batch_size):
+ return batch_insert(self.connector.get_conn(), db_type, sql_tpl, data, batch_size)
+
+ def dynamic_update_by_param(self, table_name, condition, param: dict):
+ """
+        Build and execute a dynamic UPDATE statement.
+        :param table_name: table name
+        :param condition: WHERE condition (without the leading "where" keyword)
+        :param param: dict of column/value pairs to set
+ :return:
+ """
+ sql = f'update {table_name} set '
+ for k, v in param.items():
+ sql += f'{k}=' + (f"'{v}'" if isinstance(v, str) else f'{v}') + ","
+ sql = sql[0: -1]
+ sql += f' where {condition}'
+ return self.execute_update(sql)
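+
+
+# Illustrative sketch of the SQL dynamic_update_by_param builds (hypothetical
+# table, condition and values):
+#   dao.dynamic_update_by_param('datasource_main', "source_id='abc'",
+#                               {'username': 'tom', 'port': 5432})
+#   -> update datasource_main set username='tom',port=5432 where source_id='abc'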
diff --git a/common/fudb/dbapis/fu_db_api.py b/common/fudb/dbapis/fu_db_api.py
new file mode 100644
index 0000000..2bad8f5
--- /dev/null
+++ b/common/fudb/dbapis/fu_db_api.py
@@ -0,0 +1,188 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/4 17:29
+# @Author : old tom
+# @File : fu_db_api.py
+# @Project : futool-db
+# @Desc :
+
+from sqlalchemy import Connection, text
+from common.fudb.dbapis.fu_collection import split_coll
+
+
+class SqlExecuteError(Exception):
+ def __init__(self, msg=''):
+ Exception.__init__(self, msg)
+
+
+def _select(conn: Connection, sql, one=False):
+    """
+    Run a query and close the connection automatically.
+    Rows are fetched before the connection is closed, so the returned data
+    stays usable after the connection has been released.
+    :param conn:
+    :param sql:
+    :param one: fetch a single row instead of all rows
+    :return:
+    """
+    try:
+        result = conn.execute(text(sql))
+        return result.fetchone() if one else result.fetchall()
+    finally:
+        conn.close()
+
+
+def _execute_with_tx(conn: Connection, sql):
+ """
+    Execute SQL within a transaction.
+ :return:
+ """
+ try:
+ conn.begin()
+ rt = conn.execute(text(sql))
+ conn.commit()
+ return rt.rowcount
+ except Exception as e:
+ conn.rollback()
+        raise SqlExecuteError(msg=f'sql [{sql}] failed to execute, rolling back, e={e}')
+ finally:
+ conn.close()
+
+
+def select_one(conn: Connection, sql):
+    """
+    Query a single row.
+    :param conn:
+    :param sql:
+    :return:
+    """
+    return _select(conn, sql, one=True)
+
+
+def select_all(conn: Connection, sql):
+    """
+    Query all rows.
+    :param conn:
+    :param sql:
+    :return:
+    """
+    return _select(conn, sql)
+
+
+def count(conn: Connection, table):
+ """
+    Count the rows in a table.
+ :param conn:
+ :param table:
+ :return:
+ """
+ count_tpl = f'select count(1) from {table}'
+ return select_one(conn, count_tpl)[0]
+
+
+def execute_update(conn: Connection, sql):
+ """
+    Execute within a transaction; usable for insert/update/delete statements.
+    :param conn:
+    :param sql:
+    :return: number of affected rows (similar to JDBC's executeUpdate), which can be used to tell whether the statement succeeded
+ """
+ return _execute_with_tx(conn, sql)
+
+
+def batch_insert(conn: Connection, db_type, sql_tpl, data, batch_size=1500):
+ """
+    Batch insert.
+    :param conn: database connection
+    :param batch_size: number of rows per insert statement
+    :param db_type: database type
+ :param sql_tpl: insert into t1 (f1,f2,f3) values %s
+ :param data: [(1,'tom',29),(2,'jack',30)]
+ :return:
+ """
+ handler = BatchInsertHandler(db_type, sql_tpl, data, batch_size)
+ insert_sqls = handler.build_insert()
+    # All inserts run inside a single transaction
+ row_count = 0
+ try:
+ conn.begin()
+ for sql_set in insert_sqls:
+ rt = conn.execute(text(sql_set))
+ row_count += rt.rowcount
+ conn.commit()
+ return row_count
+ except Exception as e:
+ conn.rollback()
+ raise SqlExecuteError(msg=f"批量插入异常,e={e}")
+ finally:
+ conn.close()
+
+
+class BatchInsertHandler(object):
+ """
+    Batch insert SQL builder.
+ oracle :
+ insert all
+ into oracle_table ( id, code ) values( 1 , '1' )
+ into oracle_table ( id, code ) values( 2 , '2' )
+ into oracle_table ( id, code ) values( 3 , '3' )
+ into oracle_table ( id, code ) values( 4 , '4' )
+ select 1 from dual ;
+    postgresql and mysql:
+    insert into some_table ( id, code ) values ( 1 , '1' ),( 2 , '2' ),( 3 , '3' )
+
+ """
+
+ BUILD_INSERT = {
+ 'oracle': 'build_oracle_insert',
+ 'postgresql': 'build_pg_insert',
+ 'mysql': 'build_mysql_insert'
+ }
+
+ class NotSupportError(Exception):
+ def __init__(self, msg=''):
+ Exception.__init__(self, msg)
+
+ def __init__(self, db_type, sql_tpl, data, batch_size):
+ """
+        :param db_type: database type
+        :param sql_tpl: pg and mysql: insert into t1 (f1,f2,f3) values %s
+ oracle: into t1 (f1,f2,f3) values %s
+ :param data: [(1,'tom',29),(2,'jack',30)]
+ :param batch_size:
+ """
+ if db_type not in ['oracle', 'postgresql']:
+ raise self.NotSupportError()
+ self.db_type = db_type
+ self.sql_tpl = sql_tpl
+ self.data = data
+ self.batch_size = batch_size
+
+ def _split_data(self):
+ return split_coll(self.data, self.batch_size)
+
+ def build_insert(self):
+ data_set = self._split_data()
+ sql_set = []
+ for part in data_set:
+ sql_set.append(getattr(self, self.BUILD_INSERT[self.db_type])(part))
+ return sql_set
+
+ def build_oracle_insert(self, data_set):
+ begin = 'insert all \r '
+ for ds in data_set:
+ val = '('
+ for ele in ds:
+ val += "'" + ele + "'," if isinstance(ele, str) else str(ele) + ','
+ val = val[0:-1] + ')'
+ begin += (self.sql_tpl.replace('%s', val) + ' \r ')
+ end = 'select 1 from dual'
+ return begin + end
+
+ def build_pg_insert(self, data_set):
+ vals = ''
+ for ds in data_set:
+ val = '('
+ for ele in ds:
+ val += "'" + ele + "'," if isinstance(ele, str) else str(ele) + ','
+ val = val[0:-1] + ')'
+ vals += val + ','
+ return self.sql_tpl.replace('%s', vals[0:-1])
+
+ def build_mysql_insert(self, data_set):
+ return self.build_pg_insert(data_set)
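+
+
+if __name__ == '__main__':
+    # Minimal sketch of the SQL strings the handler builds; table, columns and
+    # rows are illustrative only, and no database is needed.
+    handler = BatchInsertHandler('postgresql', 'insert into t1 (id,name) values %s',
+                                 [(1, 'tom'), (2, 'jack'), (3, 'mary')], batch_size=2)
+    for stmt in handler.build_insert():
+        print(stmt)
+    # insert into t1 (id,name) values (1,'tom'),(2,'jack')
+    # insert into t1 (id,name) values (3,'mary')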
diff --git a/common/log_conf.py b/common/log_conf.py
new file mode 100644
index 0000000..93349ea
--- /dev/null
+++ b/common/log_conf.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/8 11:05
+# @Author : old tom
+# @File : log_conf.py
+# @Project : futool-tiny-datahub
+# @Desc : Logging configuration
+
+import sys
+from loguru import logger
+
+# Log output path
+LOG_PATH = '../logout/info_log.txt'
+
+
+class Logger(object):
+ def __init__(self):
+ self.logger = logger
+ self.logger.remove()
+ self.logger.add(sys.stdout,
+                        format="{time:YYYY-MM-DD HH:mm:ss} | "  # time
+                        "{process.name} | "  # process name
+                        "{thread.name} | "  # thread name
+                        "{module}.{function}"  # module.function
+                        ":{line} | "  # line number
+                        "{level}: "  # level
+                        "{message}",  # message
+ )
+        # File sink format; comment out the add() call below to disable writing logs to file
+ self.logger.add(LOG_PATH, level='DEBUG',
+                        format='{time:YYYYMMDD HH:mm:ss} - '  # time
+                        "{process.name} | "  # process name
+                        "{thread.name} | "  # thread name
+                        '{module}.{function}:{line} - {level} -{message}',  # module.function:line - level - message
+ rotation="10 MB")
+
+ def get_logger(self):
+ return self.logger
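+
+
+if __name__ == '__main__':
+    # Minimal usage sketch, assuming the loguru package is available.
+    log = Logger().get_logger()
+    log.info('logger configured')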
diff --git a/datahub/__init__.py b/datahub/__init__.py
new file mode 100644
index 0000000..df1d656
--- /dev/null
+++ b/datahub/__init__.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/6 9:31
+# @Author : old tom
+# @File : __init__.py
+# @Project : futool-tiny-datahub
+# @Desc :
diff --git a/datahub/datasource/__init__.py b/datahub/datasource/__init__.py
new file mode 100644
index 0000000..37d0347
--- /dev/null
+++ b/datahub/datasource/__init__.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/8 9:03
+# @Author : old tom
+# @File : __init__.py
+# @Project : futool-tiny-datahub
+# @Desc : Data source management
diff --git a/datahub/datasource/constant.py b/datahub/datasource/constant.py
new file mode 100644
index 0000000..a0fbc38
--- /dev/null
+++ b/datahub/datasource/constant.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/8 13:19
+# @Author : old tom
+# @File : constant.py
+# @Project : futool-tiny-datahub
+# @Desc :
+from collections import namedtuple
+
+# Database types
+DB_TYPE = ['oracle', 'mysql', 'postgresql', 'sqlserver']
+# Databases whose connectivity check needs "select 1 from dual"
+DUAL_DB_TYPE = ['oracle', 'mysql']
+# Named tuples used for passing parameters
+# Database configuration
+ds_conf_param = namedtuple('datasource_conf', ['db_type', 'user', 'password', 'host', 'port', 'database'])
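+
+
+if __name__ == '__main__':
+    # Illustrative only: placeholder values showing the shape ConnFactory expects.
+    demo = ds_conf_param('oracle', 'scott', 'tiger', 'localhost', 1521, 'orcl')
+    print(demo.db_type in DUAL_DB_TYPE)  # True -> connectivity checks use "select 1 from dual"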
diff --git a/datahub/datasource/datasource_manage.py b/datahub/datasource/datasource_manage.py
new file mode 100644
index 0000000..d1384c7
--- /dev/null
+++ b/datahub/datasource/datasource_manage.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/8 9:25
+# @Author : old tom
+# @File : datasource_manage.py
+# @Project : futool-tiny-datahub
+# @Desc : Data source management
+
+from common.fudb.connectors.connector_factory import ConnFactory
+from common.fudb.dbapis.fu_db_api import select_one
+from datahub.datasource.dsdao.ds_dao import DataSourceDao
+from common.log_conf import Logger
+from datahub.datasource.constant import ds_conf_param, DUAL_DB_TYPE
+
+# Logger
+logger = Logger().get_logger()
+
+
+class DataSource(object):
+ def __init__(self, source_id, conf: ds_conf_param):
+ self.source_id = source_id
+ self.conf = conf
+ self.connector = ConnFactory(conf)
+
+ def get_connector(self):
+ return self.connector
+
+
+class DataSourceManage(object):
+ """
+    Data source management.
+ """
+
+ def __init__(self, local_db):
+ self.dao = DataSourceDao(local_db)
+
+ def add(self, conf: ds_conf_param):
+        # Initialize the connector
+ connector = ConnFactory(conf)
+ if self.dao.exist_by_source(connector.connector_id):
+            # The data source already exists
+            return False, 'datasource already exists'
+        # Initialize the checker
+ checker = DataSourceChecker(connector)
+ result, msg = checker.check()
+ if result:
+            # Persist to the local database
+ return self.dao.add_datasource(connector.connector_id, conf), ''
+ else:
+            # Return the error message
+ return result, f'check failed,{msg}'
+
+ def remove(self, source_id):
+ self.dao.remove_datasource(source_id)
+
+ def check(self, source_id):
+ pass
+
+ def deactivate(self, source_id):
+ """
+        Deactivate a data source.
+ :param source_id:
+ :return:
+ """
+ return self.dao.deactivate_datasource(source_id)
+
+ def edit(self, source_id, param_dict):
+ """
+        Edit a data source.
+ :return:
+ """
+ return self.dao.edit_datasource_conf(source_id, param_dict)
+
+ def get(self, source_id) -> DataSource:
+ """
+        Get a data source.
+ :param source_id:
+ :return:
+ """
+ conf = self.dao.query_datasource_conf(source_id)
+        # Convert to a named tuple
+ conf_tuple = ds_conf_param._make(conf)
+ return DataSource(source_id, conf_tuple)
+
+
+class DataSourceChecker(object):
+ """
+    Data source connectivity checker.
+ """
+
+ def __init__(self, connector: ConnFactory):
+ self.connector = connector
+
+ def check(self):
+ """
+        Connection test for an unverified data source.
+ :return:
+ """
+ try:
+            # Get a connection
+ conn = self.connector.get_conn()
+ except Exception as e:
+ return False, f'cannot get connection,e={e}'
+ try:
+            # Run a test "select 1"
+            return select_one(conn, 'select 1 ' + (
+                'from dual' if self.connector.db_type in DUAL_DB_TYPE else ''))[0] > 0, 'success'
+ except Exception as e:
+ return False, f'cannot execute "select 1",e={e}'
+
+
+if __name__ == '__main__':
+ local_ds = ds_conf_param('postgresql', 'postgres', 'root@123', 'localhost', 5432, 'postgres')
+ factory_1 = ConnFactory(local_ds)
+ factory_2 = ConnFactory(local_ds)
+ print(factory_1.connector_id, factory_2.connector_id)
+ print(factory_1 is factory_2)
+ print(factory_2.connector is factory_1.connector)
diff --git a/datahub/datasource/datasource_web_api.py b/datahub/datasource/datasource_web_api.py
new file mode 100644
index 0000000..cf75147
--- /dev/null
+++ b/datahub/datasource/datasource_web_api.py
@@ -0,0 +1,8 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/8 11:04
+# @Author : old tom
+# @File : datasource_web_api.py
+# @Project : futool-tiny-datahub
+# @Desc : HTTP interface
+
diff --git a/datahub/datasource/dsdao/__init__.py b/datahub/datasource/dsdao/__init__.py
new file mode 100644
index 0000000..468b861
--- /dev/null
+++ b/datahub/datasource/dsdao/__init__.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/8 10:11
+# @Author : old tom
+# @File : __init__.py
+# @Project : futool-tiny-datahub
+# @Desc :
diff --git a/datahub/datasource/dsdao/ds_dao.py b/datahub/datasource/dsdao/ds_dao.py
new file mode 100644
index 0000000..882458d
--- /dev/null
+++ b/datahub/datasource/dsdao/ds_dao.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/8 10:11
+# @Author : old tom
+# @File : ds_dao.py
+# @Project : futool-tiny-datahub
+# @Desc : Database operations
+
+from common.fudb.connectors.connector_factory import ConnFactory
+from common.fudb.dbapis.fu_dao import BaseDao
+from datahub.datasource.constant import ds_conf_param
+
+
+class DataSourceDao(BaseDao):
+ def __init__(self, connector: ConnFactory):
+ super().__init__(connector)
+
+ def add_datasource(self, source_id, conf: ds_conf_param):
+ sql = f"insert into datasource_main (source_id,source_type,host,port,username,password,database_name) values ('{source_id}'" \
+ f",'{conf.db_type}','{conf.host}',{conf.port},'{conf.user}','{conf.password}','{conf.database}')"
+ return self.execute_update(sql) > 0
+
+ def remove_datasource(self, source_id):
+ return self.execute_update(f"delete from datasource_main where source_id='{source_id}'")
+
+ def deactivate_datasource(self, source_id):
+ return self.execute_update(
+ f"update datasource_main set has_used='N' where source_id='{source_id}'") > 0
+
+ def exist_by_source(self, source_id):
+ """
+        Check whether a data source already exists.
+        :param source_id:
+        :return:
+        """
+        return self.query_one(f"select 1 from datasource_main where source_id='{source_id}'") is not None
+
+ def edit_datasource_conf(self, source_id, param_dict):
+ """
+        Edit a data source configuration.
+        :param source_id: primary key
+        :param param_dict: parameter dict; keys match the database column names
+        :return:
+        """
+        return self.dynamic_update_by_param('datasource_main', f"source_id='{source_id}'",
+ param_dict)
+
+ def query_datasource_conf(self, source_id):
+ conf_field = ['source_type', 'username', 'password', 'host', 'port', 'database_name']
+ return self.query_one(
+ f"select {','.join(conf_field)} from datasource_main where source_id='{source_id}'")
+
+
+if __name__ == '__main__':
+ local_ds = ds_conf_param('postgresql', 'postgres', 'root@123', 'localhost', 5432, 'postgres')
+ dao = DataSourceDao(ConnFactory(local_ds))
+ rt = dao.query_datasource_conf('db143d11741a9575fdea92ed2b39dc53')
+ print(ds_conf_param._make(rt))
diff --git a/datahub/graph/__init__.py b/datahub/graph/__init__.py
new file mode 100644
index 0000000..11487c0
--- /dev/null
+++ b/datahub/graph/__init__.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/6 9:32
+# @Author : old tom
+# @File : __init__.py
+# @Project : futool-tiny-datahub
+# @Desc : Data relation graphing
diff --git a/datahub/logout/info_log.txt b/datahub/logout/info_log.txt
new file mode 100644
index 0000000..e69de29
diff --git a/datahub/metadata/__init__.py b/datahub/metadata/__init__.py
new file mode 100644
index 0000000..dae89e5
--- /dev/null
+++ b/datahub/metadata/__init__.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/6 9:32
+# @Author : old tom
+# @File : __init__.py
+# @Project : futool-tiny-datahub
+# @Desc : Metadata management
diff --git a/datahub/metadata/metadata_reader.py b/datahub/metadata/metadata_reader.py
new file mode 100644
index 0000000..7139129
--- /dev/null
+++ b/datahub/metadata/metadata_reader.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/8 9:05
+# @Author : old tom
+# @File : metadata_reader.py
+# @Project : futool-tiny-datahub
+# @Desc : Metadata reading
+
+import abc
+from configparser import ConfigParser
+
+from datahub.datasource.constant import ds_conf_param
+from datahub.datasource.datasource_manage import DataSource, DataSourceManage
+from datahub.metadata.metadatadao.metadata_dao import MetadataDao
+from common.fudb.connectors.connector_factory import ConnFactory
+
+reader_conf = ConfigParser()
+reader_conf.read('./reader_conf.ini')
+
+
+class AbsMetadataReader(metaclass=abc.ABCMeta):
+ """
+    Abstract metadata reader.
+ """
+
+ def __init__(self, datasource):
+ self.datasource = datasource
+
+ @abc.abstractmethod
+ def query_tables(self):
+ """
+        Query all tables under the current connection.
+ :return:
+ """
+ pass
+
+ @abc.abstractmethod
+ def query_views(self):
+ """
+        Query all views under the current connection.
+ :return:
+ """
+ pass
+
+ @abc.abstractmethod
+ def query_procedure(self):
+ """
+        Query all stored procedures under the current connection.
+ :return:
+ """
+ pass
+
+ @abc.abstractmethod
+ def query_table_fields(self, table):
+ """
+        Query the columns of a table.
+ :param table:
+ :return:
+ """
+ pass
+
+
+class MetadataReader(AbsMetadataReader):
+ def __init__(self, datasource: DataSource):
+ super().__init__(datasource)
+ self.db_type = datasource.connector.db_type
+ self.dao = MetadataDao(datasource.connector)
+
+ def query_tables(self):
+ return self.dao.query_all_tables(reader_conf[self.db_type]['tables'])
+
+ def query_views(self):
+ return self.dao.query_all_views(reader_conf[self.db_type]['views'])
+
+ def query_procedure(self):
+ pass
+
+ def query_table_fields(self, table):
+ pass
+
+
+if __name__ == '__main__':
+ local_ds = ds_conf_param('postgresql', 'postgres', 'root@123', 'localhost', 5432, 'postgres')
+ dsm = DataSourceManage(ConnFactory(local_ds))
+ ds = dsm.get('db143d11741a9575fdea92ed2b39dc53')
+ mtr = MetadataReader(ds)
+ print(mtr.query_tables())
+ print(mtr.query_views())
diff --git a/datahub/metadata/metadatadao/__init__.py b/datahub/metadata/metadatadao/__init__.py
new file mode 100644
index 0000000..489531c
--- /dev/null
+++ b/datahub/metadata/metadatadao/__init__.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/9 8:27
+# @Author : old tom
+# @File : __init__.py
+# @Project : futool-tiny-datahub
+# @Desc :
diff --git a/datahub/metadata/metadatadao/metadata_dao.py b/datahub/metadata/metadatadao/metadata_dao.py
new file mode 100644
index 0000000..f0ce952
--- /dev/null
+++ b/datahub/metadata/metadatadao/metadata_dao.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/9 8:27
+# @Author : old tom
+# @File : metadata_dao.py
+# @Project : futool-tiny-datahub
+# @Desc :
+
+from common.fudb.connectors.connector_factory import ConnFactory
+from common.fudb.dbapis.fu_dao import BaseDao
+
+
+class MetadataDao(BaseDao):
+ def __init__(self, connector: ConnFactory):
+ super().__init__(connector)
+
+ def query_all_tables(self, sql):
+ """
+        Query all tables.
+ :param sql:
+ :return:
+ """
+ return [t[0] for t in self.query_all(sql)]
+
+ def query_all_views(self, sql):
+ """
+        Query all views.
+ :param sql:
+ :return:
+ """
+ return [v[0] for v in self.query_all(sql)]
diff --git a/datahub/metadata/reader_conf.ini b/datahub/metadata/reader_conf.ini
new file mode 100644
index 0000000..a9a827d
--- /dev/null
+++ b/datahub/metadata/reader_conf.ini
@@ -0,0 +1,7 @@
+[oracle]
+tables = select distinct table_name from user_tab_comments where table_type='TABLE'
+views = select distinct table_name from user_tab_comments where table_type='VIEW'
+
+[postgresql]
+tables = SELECT distinct table_name FROM information_schema.tables WHERE table_schema = 'public' ORDER BY table_name
+views = select distinct table_name from information_schema.views WHERE table_schema = 'public' ORDER BY table_name
\ No newline at end of file
diff --git a/datahub/relation/__init__.py b/datahub/relation/__init__.py
new file mode 100644
index 0000000..7fab71a
--- /dev/null
+++ b/datahub/relation/__init__.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/6 9:33
+# @Author : old tom
+# @File : __init__.py
+# @Project : futool-tiny-datahub
+# @Desc : Table and column dependency relations
diff --git a/datahub/relation/relation_analyze.py b/datahub/relation/relation_analyze.py
new file mode 100644
index 0000000..f48580b
--- /dev/null
+++ b/datahub/relation/relation_analyze.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2023/4/9 9:18
+# @Author : old tom
+# @File : relation_analyze.py
+# @Project : futool-tiny-datahub
+# @Desc :
+
+class MetadataRelationAnalyzer(object):
+ """
+    Metadata relation analysis.
+ """
+
+ def __init__(self, source_id):
+ pass
diff --git a/logout/info_log.txt b/logout/info_log.txt
new file mode 100644
index 0000000..0a6512c
--- /dev/null
+++ b/logout/info_log.txt
@@ -0,0 +1,3 @@
+20230408 11:11:37 - MainProcess | MainThread | test_log.test:16 - INFO -fefefe
+20230408 11:11:37 - MainProcess | MainThread | test_log.test:17 - ERROR -223232
+20230408 11:11:37 - MainProcess | MainThread | test_log.test:18 - WARNING -999