Run pre-commit

Co-authored-by: Ezequiel Bellver <ebellver@itba.edu.ar>
Co-authored-by: Juan Barmasch <jbarmasch@itba.edu.ar>
This commit is contained in:
Santiago Lo Coco 2022-12-13 08:15:02 -03:00
parent 9ffd45dfb6
commit cd27603712
8 changed files with 57 additions and 46 deletions

View File

@@ -4,8 +4,8 @@ from fastapi import APIRouter
from fastapi.responses import JSONResponse
from bsition.api.models.document import Document, DocumentUpdate
from bsition.backend.mongo import documents as mongo
from bsition.backend.elastic import search as elastic
from bsition.backend.mongo import documents as mongo
router = APIRouter()

View File

@@ -4,8 +4,8 @@ from fastapi.security import OAuth2PasswordRequestForm
from bsition.api.utils.jwt import write_token
from bsition.api.utils.password import verify_password
from bsition.backend.redis.tokens import add_token
from bsition.backend.postgres.users import get_user_by_username
from bsition.backend.redis.tokens import add_token
router = APIRouter()

View File

@@ -4,8 +4,8 @@ from fastapi.responses import JSONResponse
from bsition.api.models.user import User
from bsition.api.utils.password import get_hashed_password
from bsition.api.utils.security import get_current_user
from bsition.backend.redis import tokens as redis
from bsition.backend.postgres import users as postgres
from bsition.backend.redis import tokens as redis
router = APIRouter()

View File

@@ -4,8 +4,8 @@ from jwt import exceptions
from starlette import status
from bsition.api.utils.jwt import validate_token
from bsition.backend.redis.tokens import valid_token
from bsition.backend.postgres.users import get_user_by_username
from bsition.backend.redis.tokens import valid_token
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="api/token")

View File

@@ -1,8 +1,8 @@
from dotenv import load_dotenv
from bsition.backend.elastic.utils import create_index
from bsition.backend.postgres.users import create_user_table
from bsition.backend.postgres.tables import add_function
from bsition.backend.postgres.users import create_user_table
def configure():

View File

@@ -16,9 +16,9 @@ def insert_columns(id, name, type, data):
new_column_names.append(name)
new_types = doc["types"].copy()
new_types.append(type)
docs_coll.update_one({"_id": id}, {"$set": {
"column_names": new_column_names, "types": new_types}
})
docs_coll.update_one(
{"_id": id}, {"$set": {"column_names": new_column_names, "types": new_types}}
)
def edit_column(row_number, column, data, id):
@@ -31,9 +31,7 @@ def edit_column(row_number, column, data, id):
i = column_names.index(column)
new_data[row_number]["column_data"][i] = data
docs_coll.update_one({"_id": id}, {"$set": {
"data": new_data}
})
docs_coll.update_one({"_id": id}, {"$set": {"data": new_data}})
def sort(id):
@@ -45,14 +43,16 @@ def sort(id):
sort_values = postgres.get_sort(id)
obj = {}
for sort in sort_values:
obj['data.column_data.' + str(column_names.index(sort[1]))] = 1 if sort[2] == 'ASC' else -1
obj["data.column_data." + str(column_names.index(sort[1]))] = (
1 if sort[2] == "ASC" else -1
)
pipeline = [
{"$match": {"_id": id}},
{"$unwind": '$data'},
{"$unwind": "$data"},
{"$sort": obj},
{"$group": {"_id": '$_id', 'aux': {"$push": '$data'}}},
{"$project": {'data': '$aux'}}
{"$group": {"_id": "$_id", "aux": {"$push": "$data"}}},
{"$project": {"data": "$aux"}},
]
return list(docs_coll.aggregate(pipeline))
@@ -68,42 +68,43 @@ def filter(id):
for filter in filter_values:
match filter[3]:
case "e":
obj['data.column_data.' + str(column_names.index(filter[1]))] = {
obj["data.column_data." + str(column_names.index(filter[1]))] = {
"$eq": filter[2]
}
case "ne":
obj['data.column_data.' + str(column_names.index(filter[1]))] = {
obj["data.column_data." + str(column_names.index(filter[1]))] = {
"$ne": filter[2]
}
case "le":
obj['data.column_data.' + str(column_names.index(filter[1]))] = {
obj["data.column_data." + str(column_names.index(filter[1]))] = {
"$lte": filter[2]
}
case "ge":
obj['data.column_data.' + str(column_names.index(filter[1]))] = {
obj["data.column_data." + str(column_names.index(filter[1]))] = {
"$gte": filter[2]
}
case "l":
obj['data.column_data.' + str(column_names.index(filter[1]))] = {
obj["data.column_data." + str(column_names.index(filter[1]))] = {
"$lt": filter[2]
}
case "g":
obj['data.column_data.' + str(column_names.index(filter[1]))] = {
obj["data.column_data." + str(column_names.index(filter[1]))] = {
"$gt": filter[2]
}
case "c":
obj['data.column_data.' + str(column_names.index(filter[1]))] = {
"$regex": ".*" + filter[2] + ".*", "$options": "i"
obj["data.column_data." + str(column_names.index(filter[1]))] = {
"$regex": ".*" + filter[2] + ".*",
"$options": "i",
}
case "_":
raise "Invalid filter function"
pipeline = [
{"$match": {"_id": id}},
{"$unwind": '$data'},
{"$unwind": "$data"},
{"$match": obj},
{"$group": {"_id": '$_id', 'aux': {"$push": '$data'}}},
{"$project": {'data': '$aux'}}
{"$group": {"_id": "$_id", "aux": {"$push": "$data"}}},
{"$project": {"data": "$aux"}},
]
return list(docs_coll.aggregate(pipeline))
@@ -121,6 +122,7 @@ def remove_column(id, column):
for row in data:
del row["column_data"][idx]
docs_coll.update_one({"_id": id}, {"$set": {
"column_names": column_names, "types": types, "data": data}
})
docs_coll.update_one(
{"_id": id},
{"$set": {"column_names": column_names, "types": types, "data": data}},
)

View File

@@ -7,24 +7,28 @@ def create_relations_tables():
conn = get_connection()
cur = conn.cursor()
cur.execute(
sql.SQL("""
sql.SQL(
"""
CREATE TABLE table_access (
user_id INTEGER REFERENCES users(id),
table_id INTEGER,
access_type INTEGER CHECK (access_type IN (1, 2, 3)),
PRIMARY KEY (user_id, table_id)
PRIMARY KEY (user_id, table_id)
)
""")
"""
)
)
cur.execute(
sql.SQL("""
sql.SQL(
"""
CREATE TABLE doc_access (
user_id INTEGER REFERENCES users(id),
doc_id INTEGER,
access_type INTEGER CHECK (access_type IN (1, 2, 3)),
PRIMARY KEY (user_id, doc_id)
PRIMARY KEY (user_id, doc_id)
)
""")
"""
)
)
conn.commit()
@@ -44,17 +48,19 @@ def give_access(user_id, id, access_type, destination):
conn = get_connection()
cur = conn.cursor()
cur.execute(
sql.SQL("""
INSERT INTO {destination_name} (user_id, {destination_id}, access_type)
sql.SQL(
"""
INSERT INTO {destination_name} (user_id, {destination_id}, access_type)
VALUES ({user_id}, {id}, {access_type})
ON CONFLICT (user_id, {destination_id}) DO UPDATE
ON CONFLICT (user_id, {destination_id}) DO UPDATE
SET access_type = {access_type}
""").format(
"""
).format(
user_id=sql.Literal(user_id),
destination_name=sql.Identifier(destination + "_access"),
destination_id=sql.Identifier(destination + "_id"),
access_type=sql.Literal(access_type),
id=sql.Literal(id)
id=sql.Literal(id),
)
)
conn.commit()
@@ -74,11 +80,13 @@ def has_access(user_id, id, destination):
conn = get_connection()
cur = conn.cursor()
cur.execute(
sql.SQL("SELECT access_type FROM {destination_access} WHERE user_id = {user_id} AND {destination_id} = {id}").format(
sql.SQL(
"SELECT access_type FROM {destination_access} WHERE user_id = {user_id} AND {destination_id} = {id}"
).format(
user_id=sql.Literal(user_id),
destination_access=sql.Identifier(destination + "_access"),
destination_id=sql.Identifier(destination + "_id"),
id=sql.Literal(id)
id=sql.Literal(id),
)
)
return list(cur.fetchall())
@@ -99,11 +107,13 @@ def deny_access(user_id, id, destination):
conn = get_connection()
cur = conn.cursor()
cur.execute(
sql.SQL("DELETE FROM {destination_access} WHERE user_id = {user_id} AND {destination_id} = {id}").format(
sql.SQL(
"DELETE FROM {destination_access} WHERE user_id = {user_id} AND {destination_id} = {id}"
).format(
user_id=sql.Literal(user_id),
destination_access=sql.Identifier(destination + "_access"),
destination_id=sql.Identifier(destination + "_id"),
id=sql.Literal(id)
id=sql.Literal(id),
)
)
conn.commit()

View File

@@ -96,8 +96,7 @@ def add_filter(id, property, value, function):
conn = get_connection()
cur = conn.cursor()
cur.execute(
sql.SQL("INSERT INTO t_filter VALUES (%s, %s, %s, %s)").format(
),
sql.SQL("INSERT INTO t_filter VALUES (%s, %s, %s, %s)").format(),
(id, property, value, function),
)
conn.commit()