Update backend (and add elasticsearch)
Co-authored-by: Ezequiel Bellver <ebellver@itba.edu.ar>
Co-authored-by: Juan Barmasch <jbarmasch@itba.edu.ar>

parent 75bd352824
commit 2c866293f3
@@ -68,4 +68,5 @@ cython_debug/
.python-version
.vscode/*
*.obj
*_data
*_data
test.py

@@ -1,9 +1,9 @@
from fastapi import FastAPI
from dotenv import load_dotenv
from api.routes.auth import auth_routes
from api.test.test import test_routes
from api.components.document import document_routes

app = FastAPI()
app.include_router(auth_routes, prefix="/api")
app.include_router(test_routes, prefix="/api")
app.include_router(document_routes, prefix="/api")
load_dotenv()
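As a quick reference, a minimal sketch for serving this app locally; the module path api.main is an assumption based on the import paths above, and uvicorn is pinned in the lockfile changes further down.

    # Hypothetical dev entry point; "api.main:app" is assumed from the import paths.
    import uvicorn

    if __name__ == "__main__":
        uvicorn.run("api.main:app", reload=True, port=8000)
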
@@ -0,0 +1,42 @@
import json

from fastapi import APIRouter, Response, status
from pydantic import BaseModel
from api.middleware.verify_token import VerifyTokenRoute
import backend.mongo as mongo
import backend.elastic as elastic

document_routes = APIRouter(route_class=VerifyTokenRoute)


class Document(BaseModel):
    name: str
    access: list
    data: str
    owner: str


class Data(BaseModel):
    data: str


@document_routes.post("/document/create")
def create(aux: Document, response: Response):
    mongo.create_document(json.loads(json.dumps(aux.__dict__)))
    response.status_code = status.HTTP_201_CREATED


@document_routes.get("/document/{id}")
def get_by_id(id: str):
    return mongo.get_document_by_id(id)


@document_routes.put("/document/{id}")
def edit_data(data: Data, id: str, response: Response):
    mongo.edit_data(id, data.data)
    response.status_code = status.HTTP_202_ACCEPTED


@document_routes.get("/document")
def search():
    return elastic.search("test-index")
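A hedged sketch of exercising these routes from a client. The port and the Authorization header name are assumptions; the real token contract lives in VerifyTokenRoute, of which this diff only shows a fragment.

    # Hypothetical client calls; base URL, port, and header name are assumptions.
    import requests

    BASE = "http://localhost:8000/api"
    HEADERS = {"Authorization": "<token>"}  # actual header contract is in VerifyTokenRoute

    doc = {"name": "notes", "access": [], "data": "some new text", "owner": "ebellver"}
    requests.post(f"{BASE}/document/create", json=doc, headers=HEADERS)                    # expect 201
    requests.put(f"{BASE}/document/<mongo-id>", json={"data": "edited"}, headers=HEADERS)  # expect 202
    print(requests.get(f"{BASE}/document", headers=HEADERS).json())                        # Elasticsearch-backed search
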
@@ -12,6 +12,7 @@ class VerifyTokenRoute(APIRoute):

        validation_response = validate_token(token, output=False)

        # return await original_route(request)
        if validation_response is None:
            return await original_route(request)
        else:
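Only this fragment of the middleware is in the diff. For orientation, a custom FastAPI APIRoute like VerifyTokenRoute usually overrides get_route_handler; the skeleton below is an assumption about the surrounding structure, not the project's actual code.

    # Assumed skeleton; only the lines in the hunk above are confirmed by this diff.
    from fastapi import Request
    from fastapi.routing import APIRoute


    class VerifyTokenRoute(APIRoute):
        def get_route_handler(self):
            original_route = super().get_route_handler()

            async def route_handler(request: Request):
                token = request.headers.get("Authorization")  # header name assumed
                # validate_token() is defined elsewhere in the real module
                validation_response = validate_token(token, output=False)
                if validation_response is None:
                    return await original_route(request)
                else:
                    return validation_response  # assumed: short-circuit with the error payload

            return route_handler
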
@@ -1,14 +0,0 @@
from fastapi import APIRouter
from pydantic import BaseModel
from api.middleware.verify_token import VerifyTokenRoute

test_routes = APIRouter(route_class=VerifyTokenRoute)


class Test(BaseModel):
    test: str


@test_routes.post("/test")
def test(aux: Test):
    return aux.test

@@ -0,0 +1,53 @@
from os import getenv
from elasticsearch import Elasticsearch


def get_client():
    return Elasticsearch(getenv("ELASTIC_URL"))


def add_document(index, id, doc):
    client = get_client()
    client.index(index=index, id=id, document=doc)


def refresh_index(index):
    client = get_client()
    client.indices.refresh(index=index)


def search(index):
    client = get_client()

    resp = client.search(index=index, query={
        # "query_string": {
        #     "query": "*puan*",
        #     "default_field": "data"
        # }
        "bool": {
            "must": [
                {
                    "query_string": {
                        "query": "*new*",
                        "default_field": "data"
                    }
                },
                # {
                #     "match": {
                #         "id": "1",
                #     }
                # }
            ]
        }
    }, highlight={
        "fields": {
            "data": {}
        }
    })
    print("Got %d hit(s):" % resp['hits']['total']['value'])

    for hit in resp['hits']['hits']:
        print(resp['hits']['total'])
        print(resp['hits'])
        print(hit["_source"])
        print("%(name)s: %(data)s" % hit["_source"])
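search() hard-codes the term *new*. A parameterized variant (an assumption, not what the commit ships, reusing the get_client() helper above) might look like:

    # Hypothetical parameterized variant of search(); same client helper as above.
    def search_term(index, term):
        client = get_client()
        resp = client.search(index=index, query={
            "query_string": {
                "query": "*%s*" % term,   # leading wildcards are expensive on large indices
                "default_field": "data"
            }
        }, highlight={"fields": {"data": {}}})
        return [hit["_source"] for hit in resp["hits"]["hits"]]
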
backend/main.py (132 lines)

@@ -1,132 +0,0 @@
import psycopg2
from bson import ObjectId
from psycopg2 import sql
from pymongo import MongoClient


def get_database():
    client = MongoClient("mongodb://root:password@localhost:27017")
    return client['documents']


def create_document(document):
    dbname = get_database()
    docs_coll = dbname['docs']
    docs_coll.insert_one(document)


def get_document_by_id(id):
    dbname = get_database()
    docs_coll = dbname['docs']
    return docs_coll.find({"_id": id})


def get_document_by_name(name):
    dbname = get_database()
    docs_coll = dbname['docs']
    return docs_coll.find({"name": name})


def edit_data(id, data):
    dbname = get_database()
    docs_coll = dbname['docs']
    docs_coll.update_one({"_id": id}, {"$set": {"data": data}})


def edit_access(id, access):
    dbname = get_database()
    docs_coll = dbname['docs']
    docs_coll.update_one({"_id": id}, {"$set": {"access": access}})


def edit_name(id, name):
    dbname = get_database()
    docs_coll = dbname['docs']
    docs_coll.update_one({"_id": id}, {"$set": {"name": name}})


def create_table(name):
    cur = conn.cursor()
    cur.execute(sql.SQL("CREATE TABLE {table} (row_number SERIAL PRIMARY KEY)").format(table=sql.Identifier(name)))


def add_column(name, column, type):
    cur = conn.cursor()
    cur.execute(sql.SQL("ALTER TABLE {table} ADD {column}" + type).format(
        table=sql.Identifier(name),
        column=sql.Identifier(column))
    )


def insert_column(name, column, data):
    pass


def insert_columns(name, data):
    cur = conn.cursor()

    str = "(" + "DEFAULT, %s," * (len(data) - 1) + "%s" + ")"  # TODO: change.
    # cur.execute(sql.SQL("INSERT INTO {table} VALUES %s").format(
    cur.execute(sql.SQL("INSERT INTO {table} VALUES" + str).format(
        table=sql.Identifier(name)),
        data
    )


def edit_columns(name, columns, data, id):
    cur = conn.cursor()

    i = 0
    for column in columns:
        cur.execute(sql.SQL("UPDATE {table} SET {col} = %s WHERE row_number = " + id).format(
            table=sql.Identifier(name),
            col=sql.Identifier(column)),
            [data[i]]
        )
        i += 1


def remove_column(name, column):
    cur = conn.cursor()
    cur.execute(sql.SQL("ALTER TABLE {table} DROP COLUMN {column}").format(
        table=sql.Identifier(name),
        column=sql.Identifier(column))
    )


def create_sort(name):
    cur = conn.cursor()
    cur.execute(
        sql.SQL("CREATE TABLE {table} (property TEXT, _order CHAR(3), priority int)").format(
            table=sql.Identifier(name + "_sort")
        )
    )


def add_sort(name, property, order, priority):
    cur = conn.cursor()
    cur.execute(
        sql.SQL("INSERT INTO {table} VALUES (%s, %s, %s)").format(table=sql.Identifier(name + "_sort")),
        (property, order, priority)
    )


def sort():
    pass


def add_filter():
    pass


conn = None

if __name__ == "__main__":
    conn = psycopg2.connect(
        host="localhost",
        database="bd2",
        user="root",
        password="password")

    conn.commit()

@@ -0,0 +1,50 @@
from os import getenv
from bson import ObjectId
from pymongo import MongoClient
import backend.elastic as elastic


def get_database():
    client = MongoClient(getenv("MONGO_URL"))
    return client['documents']


def create_document(document):
    dbname = get_database()
    docs_coll = dbname['docs']
    doc = document.copy()
    docs_coll.insert_one(document)
    elastic.add_document("test-index", document['_id'], doc)


def get_document_by_id(id):
    dbname = get_database()
    docs_coll = dbname['docs']
    doc = docs_coll.find({"_id": ObjectId(id)}, {"_id": 0}).next().items()
    return doc


def get_document_by_name(name):
    dbname = get_database()
    docs_coll = dbname['docs']
    return docs_coll.find({"name": name})


def edit_data(id, data):
    dbname = get_database()
    docs_coll = dbname['docs']
    docs_coll.update_one({"_id": ObjectId(id)}, {"$set": {"data": data}})
    doc = docs_coll.find_one({"_id": ObjectId(id)}, {"_id": 0})
    elastic.add_document("test-index", id, doc)


def edit_access(id, access):
    dbname = get_database()
    docs_coll = dbname['docs']
    docs_coll.update_one({"_id": id}, {"$set": {"access": access}})


def edit_name(id, name):
    dbname = get_database()
    docs_coll = dbname['docs']
    docs_coll.update_one({"_id": id}, {"$set": {"name": name}})
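A hedged usage sketch for these helpers; it assumes a .env providing MONGO_URL and ELASTIC_URL, with both services reachable.

    # Hedged sketch; module path backend.mongo matches the import in document.py above.
    from dotenv import load_dotenv

    import backend.mongo as mongo

    load_dotenv()
    mongo.create_document({"name": "notes", "access": [], "data": "some new text", "owner": "ebellver"})
    # Note: insert_one() mutates the dict it is given by adding an ObjectId under "_id";
    # that is why create_document() indexes the pre-insert copy into Elasticsearch, so
    # the indexed body stays JSON-serializable.
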
@@ -0,0 +1,211 @@
from os import getenv

import psycopg2
from dotenv import load_dotenv
from psycopg2 import sql


def get_connection():
    return psycopg2.connect(
        host=getenv("POSTGRES_HOST"),
        database=getenv("POSTGRES_DB"),
        user=getenv("POSTGRES_USER"),
        password=getenv("POSTGRES_PASSWORD")
    )


def create_table(name):
    conn = get_connection()
    cur = conn.cursor()
    cur.execute(sql.SQL("CREATE TABLE {table} (row_number SERIAL PRIMARY KEY)").format(table=sql.Identifier(name)))
    conn.commit()


def add_column(name, column, type):
    conn = get_connection()
    cur = conn.cursor()
    cur.execute(sql.SQL("ALTER TABLE {table} ADD {column}" + type).format(
        table=sql.Identifier(name),
        column=sql.Identifier(column))
    )
    conn.commit()


def insert_columns(name, data):
    conn = get_connection()
    cur = conn.cursor()
    str = "(" + "DEFAULT, %s," * (len(data) - 1) + "%s" + ")"  # TODO: change.
    cur.execute(sql.SQL("INSERT INTO {table} VALUES" + str).format(
        table=sql.Identifier(name)),
        data
    )
    conn.commit()


def edit_columns(name, columns, data, id):
    conn = get_connection()
    cur = conn.cursor()
    i = 0
    for column in columns:
        cur.execute(sql.SQL("UPDATE {table} SET {col} = %s WHERE row_number = " + id).format(
            table=sql.Identifier(name),
            col=sql.Identifier(column)),
            [data[i]]
        )
        i += 1
    conn.commit()


def remove_column(name, column):
    conn = get_connection()
    cur = conn.cursor()
    cur.execute(sql.SQL("ALTER TABLE {table} DROP COLUMN {column}").format(
        table=sql.Identifier(name),
        column=sql.Identifier(column))
    )
    conn.commit()


def create_sort(name):
    conn = get_connection()
    cur = conn.cursor()
    cur.execute(
        sql.SQL("CREATE TABLE {table} (property TEXT, _order CHAR(3), priority int)").format(
            table=sql.Identifier(name + "_sort")
        )
    )
    conn.commit()


def add_sort(name, property, order, priority):
    conn = get_connection()
    cur = conn.cursor()
    cur.execute(
        sql.SQL("INSERT INTO {table} VALUES (%s, %s, %s)").format(table=sql.Identifier(name + "_sort")),
        (property, order, priority)
    )
    conn.commit()


def sort(name):
    conn = get_connection()
    cur = conn.cursor()
    cur.execute(
        sql.SQL("SELECT * FROM {table} ORDER BY priority").format(table=sql.Identifier(name + "_sort")),
    )
    order_clause = "ORDER BY "
    i = 0
    for sort in cur:
        if i > 0:
            order_clause += ", "
        order_clause += sort[0] + " " + sort[1]
        i += 1
    cur.execute(
        sql.SQL("SELECT * FROM {table} " + order_clause).format(table=sql.Identifier(name)),
    )
    return cur


def add_function():
    conn = get_connection()
    cur = conn.cursor()
    cur.execute(
        """
        CREATE OR REPLACE FUNCTION trigger_function()
        RETURNS TRIGGER
        LANGUAGE PLPGSQL
        AS $$
        DECLARE
            name text := TG_ARGV[0]::text;
        BEGIN
            IF NEW.property NOT IN (
                SELECT column_name
                FROM INFORMATION_SCHEMA.COLUMNS
                WHERE TABLE_NAME = name)
            THEN
                RAISE EXCEPTION 'ERROR %', NEW.property;

            END IF;

            RETURN NEW;
        END;
        $$;
        """
    )
    conn.commit()


def add_filter_trigger(name):
    conn = get_connection()
    cur = conn.cursor()
    cur.execute(
        sql.SQL(
            """
            CREATE TRIGGER {filter}
            BEFORE INSERT OR UPDATE
            ON {filter}
            FOR EACH ROW
            EXECUTE PROCEDURE trigger_function({table});
            """
        ).format(table=sql.Identifier(name), filter=sql.Identifier(name + "_filter"))
    )
    conn.commit()


def create_filter(name):
    conn = get_connection()
    cur = conn.cursor()
    cur.execute(sql.SQL("""
        CREATE TABLE {table} (
            property TEXT,
            value TEXT,
            function TEXT CHECK (function IN ('c', 'e', 'n'))
        )
    """).format(table=sql.Identifier(name + "_filter")))
    conn.commit()


def add_filter(name, property, value, function):
    conn = get_connection()
    cur = conn.cursor()
    cur.execute(
        sql.SQL("INSERT INTO {table} VALUES (%s, %s, %s)").format(table=sql.Identifier(name + "_filter")),
        (property, value, function)
    )
    conn.commit()


def filter(name):
    conn = get_connection()
    cur = conn.cursor()
    cur.execute(
        sql.SQL("SELECT * FROM {table}").format(table=sql.Identifier(name + "_filter")),
    )
    filter_clause = "WHERE "
    i = 0
    for sort in cur:
        if i > 0:
            filter_clause += " AND "
        filter_clause += sort[0]
        match sort[2]:
            case 'e':
                filter_clause += " = '" + sort[1] + "'"
            case 'ne':
                filter_clause += " <> '" + sort[1] + "'"
            case 'le':
                filter_clause += " <= " + sort[1]
            case 'ge':
                filter_clause += " >= " + sort[1]
            case 'l':
                filter_clause += " < " + sort[1]
            case 'g':
                filter_clause += " > " + sort[1]
            case 'c':
                filter_clause += " ILIKE '%" + sort[1] + "'"
            case _:
                raise ValueError("Invalid filter function")
        i += 1
    cur.execute(
        sql.SQL("SELECT * FROM {table} " + filter_clause).format(table=sql.Identifier(name)),
    )
    return cur
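A hedged sketch of how these dynamic-table helpers chain together. The module path backend.postgres is an assumption (the diff does not name the file), and insert_columns expects one value per non-serial column.

    # Hedged usage sketch; backend.postgres as the module name is an assumption.
    from dotenv import load_dotenv

    import backend.postgres as pg

    load_dotenv()
    pg.create_table("tasks")                               # tasks(row_number SERIAL PRIMARY KEY)
    pg.add_column("tasks", "title", " TEXT")               # type string is concatenated raw, hence the leading space
    pg.add_column("tasks", "owner", " TEXT")
    pg.insert_columns("tasks", ("buy milk", "jbarmasch"))  # expands to VALUES (DEFAULT, %s,%s)
    pg.create_sort("tasks")                                # side table tasks_sort(property, _order, priority)
    pg.add_sort("tasks", "title", "asc", 1)
    rows = pg.sort("tasks")                                # second query runs with "ORDER BY title asc"
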
@@ -26,3 +26,25 @@ services:
      POSTGRES_DB: bd2
    volumes:
      - ./postgres_data:/var/lib/postgresql/data

  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:8.4.2
    container_name: bsition-elasticsearch
    environment:
      - xpack.security.enabled=false
      - discovery.type=single-node
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    ulimits:
      memlock:
        soft: -1
        hard: -1
      nofile:
        soft: 65536
        hard: 65536
    cap_add:
      - IPC_LOCK
    volumes:
      - ./elasticsearch_data:/usr/share/elasticsearch/data
    ports:
      - "9200:9200"
      - "9300:9300"
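A quick smoke test for the new service (a sketch; the URL follows the 9200 port mapping above, which is what ELASTIC_URL would point at locally):

    # Minimal connectivity check against the container defined above.
    from elasticsearch import Elasticsearch

    es = Elasticsearch("http://localhost:9200")
    print(es.info()["version"]["number"])  # expect 8.4.2, per the image tag
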
@@ -15,6 +15,14 @@ doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"]
trio = ["trio (>=0.16,<0.22)"]

[[package]]
name = "certifi"
version = "2022.9.24"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
python-versions = ">=3.6"

[[package]]
name = "click"
version = "8.1.3"

@@ -50,6 +58,36 @@ idna = ["idna (>=2.1,<4.0)"]
trio = ["trio (>=0.14,<0.20)"]
wmi = ["wmi (>=1.5.1,<2.0.0)"]

[[package]]
name = "elastic-transport"
version = "8.4.0"
description = "Transport classes and utilities shared among Python Elastic client libraries"
category = "main"
optional = false
python-versions = ">=3.6"

[package.dependencies]
certifi = "*"
urllib3 = ">=1.26.2,<2"

[package.extras]
develop = ["aiohttp", "mock", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "trustme"]

[[package]]
name = "elasticsearch"
version = "8.5.0"
description = "Python client for Elasticsearch"
category = "main"
optional = false
python-versions = ">=3.6, <4"

[package.dependencies]
elastic-transport = ">=8,<9"

[package.extras]
async = ["aiohttp (>=3,<4)"]
requests = ["requests (>=2.4.0,<3.0.0)"]

[[package]]
name = "email-validator"
version = "1.3.0"

@@ -213,6 +251,19 @@ category = "main"
optional = false
python-versions = ">=3.7"

[[package]]
name = "urllib3"
version = "1.26.12"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"

[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]

[[package]]
name = "uvicorn"
version = "0.19.0"

@@ -270,13 +321,17 @@ python-versions = ">=3.7"
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
content-hash = "315626847a4a249371e4019e6d8621f9d1c8beb70089afd3c900c7a74d05de3b"
content-hash = "ec169a11aede0c06db95820fd011d9a9724df08569901d9ed87c1e6235cb3ae6"

[metadata.files]
anyio = [
    {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"},
    {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"},
]
certifi = [
    {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"},
    {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"},
]
click = [
    {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
    {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},

@@ -289,6 +344,14 @@ dnspython = [
    {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"},
    {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"},
]
elastic-transport = [
    {file = "elastic-transport-8.4.0.tar.gz", hash = "sha256:b9ad708ceb7fcdbc6b30a96f886609a109f042c0b9d9f2e44403b3133ba7ff10"},
    {file = "elastic_transport-8.4.0-py3-none-any.whl", hash = "sha256:19db271ab79c9f70f8c43f8f5b5111408781a6176b54ab2e54d713b6d9ceb815"},
]
elasticsearch = [
    {file = "elasticsearch-8.5.0-py3-none-any.whl", hash = "sha256:b478307fedab69966f569a9643fdcedb5c09ba1e9d09dc36e5579c597669bd8e"},
    {file = "elasticsearch-8.5.0.tar.gz", hash = "sha256:47cfc484ebca07371a9dbd9ce333c55f450daf0790a799944a91234df3d34c5a"},
]
email-validator = [
    {file = "email_validator-1.3.0-py2.py3-none-any.whl", hash = "sha256:816073f2a7cffef786b29928f58ec16cdac42710a53bb18aa94317e3e145ec5c"},
    {file = "email_validator-1.3.0.tar.gz", hash = "sha256:553a66f8be2ec2dea641ae1d3f29017ab89e9d603d4a25cdaac39eefa283d769"},

@@ -532,6 +595,10 @@ typing-extensions = [
    {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
    {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
]
urllib3 = [
    {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"},
    {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
]
uvicorn = [
    {file = "uvicorn-0.19.0-py3-none-any.whl", hash = "sha256:cc277f7e73435748e69e075a721841f7c4a95dba06d12a72fe9874acced16f6f"},
    {file = "uvicorn-0.19.0.tar.gz", hash = "sha256:cf538f3018536edb1f4a826311137ab4944ed741d52aeb98846f52215de57f25"},

@@ -13,6 +13,7 @@ PyJWT = "^2.6.0"
pydantic = {extras = ["email"], version = "^1.10.2"}
pymongo = {extras = ["srv"], version = "^4.3.2"}
psycopg2 = "^2.9.5"
elasticsearch = "^8.5.0"

[build-system]
requires = ["poetry-core"]