Remove unused code
commit b3cc4d4cc3 · parent fd33eccc7c
@@ -5,11 +5,6 @@ from src import create_app, db
from src.api.models.users import User
from src.config import ProductionConfig

# from pactman import Consumer, Provider
# from src.tests.client.client import UsersClient

PACT_DIR = "src/tests/pacts"
@@ -18,7 +13,7 @@ def test_app():
    app = create_app()
    app.config.from_object("src.config.TestingConfig")
    with app.app_context():
        yield app  # testing happens here
        yield app


@pytest.fixture(scope="module")
@@ -31,27 +26,11 @@ def test_namespace():
def test_database():
    db.drop_all()
    db.create_all()
    yield db  # testing happens here
    yield db
    db.session.remove()
    db.drop_all()


# @pytest.fixture(scope="function")
# def pact():
#     pact = Consumer("UsersConsumer").has_pact_with(
#         Provider("UsersProvider"), pact_dir=PACT_DIR
#     )
#     pact.start_service()
#     yield pact
#     pact.stop_service()


# @pytest.fixture(scope="function")
# def user_client(pact):
#     cli = UsersClient(uri=pact.uri)
#     yield cli


@pytest.fixture(scope="function")
def prod_config():
    yield ProductionConfig()
@@ -37,13 +37,11 @@ def test_not_registered_user_login(test_app, test_database):
def test_valid_refresh(test_app, test_database, add_user):
    add_user(TEST_USERNAME, TEST_EMAIL, TEST_PASSWD)
    client = test_app.test_client()
    # user login
    resp_login = client.post(
        "/auth/login",
        data=json.dumps({"email": TEST_EMAIL, "password": TEST_PASSWD}),
        content_type="application/json",
    )
    # valid refresh
    refresh_token = json.loads(resp_login.data.decode())["refresh_token"]
    resp = client.post(
        "/auth/refresh",
@@ -60,13 +58,11 @@ def test_valid_refresh(test_app, test_database, add_user):
def test_invalid_refresh_expired_token(test_app, test_database, add_user):
    add_user("test5", "test5@test.com", "test")
    client = test_app.test_client()
    # user login
    resp_login = client.post(
        "/auth/login",
        data=json.dumps({"email": "test5@test.com", "password": "test"}),
        content_type="application/json",
    )
    # invalid token refresh
    time.sleep(10)
    refresh_token = json.loads(resp_login.data.decode())["refresh_token"]
    resp = client.post(
@@ -30,7 +30,6 @@ def test_production_config(test_app, monkeypatch):
        "DATABASE_URL", "postgresql://postgres:postgres@api-db:5432/api_users"
    )
    test_app.config.from_object(ProductionConfig())
    # assert test_app.config["SECRET_KEY"] == "my_precious"
    assert not test_app.config["TESTING"]
    assert test_app.config["SQLALCHEMY_DATABASE_URI"] == os.environ.get("DATABASE_URL")
    assert test_app.config["BCRYPT_LOG_ROUNDS"] == 13
@@ -17,12 +17,10 @@ instance.interceptors.request.use((request) => {

instance.interceptors.response.use(
  (response) => {
    console.log(response.headers)
    if (response.headers["x-count"]) {
      let json: any = {}
      json["flights"] = JSON.parse(response.data);
      json["count"] = response.headers["x-count"]
      console.log(json)
      return json
    } else if (response.status === 204) {
      return response;
@@ -48,8 +48,6 @@ export const Card: React.FC<CardProps> = ({ flight, user, subscribed, refresh, r
      flight_id: flight.id
    }

    console.log(data)

    subscribeToFlight(data, token)
      .then(() => {
        refresh()
@@ -85,7 +83,7 @@ export const Card: React.FC<CardProps> = ({ flight, user, subscribed, refresh, r
        refreshFlights()
      })
      .catch((error) => {
        console.log(error)
        // console.log(error)
      });
  };

@@ -106,20 +104,15 @@ export const Card: React.FC<CardProps> = ({ flight, user, subscribed, refresh, r
      flight_id: flight.id
    }

    console.log(data)

    unsubscribeFromFlight(data, token)
      .then(() => {
        refresh()
      })
      .catch((error) => {
        console.log(error)
        // console.log(error)
      });
  };

  console.log(flight.user_id)
  console.log(user?.id)

  return (
    <div className="flight-card">
      <Space size={8} align="center">
@@ -8,7 +8,6 @@ export const LogIn = () => {
  const [email, setEmail] = useState("");
  const [password, setPassword] = useState("");
  const navigate = useNavigate();
  console.log(error)

  return (
    <div className="Box Small">
@@ -106,8 +106,6 @@ def update_flight(db: Session, update_data, id):
    db_flight = db.query(Flight).filter(Flight.id == id).first()
    if db_flight is None:
        raise KeyError
    # if db_flight.user_id != update_data["user_id"] and role != "admin":
    #     raise PermissionError

    new_flight = Flight(
        **{
@@ -42,23 +42,6 @@ async def create_flight(
    return response


# @router.delete("/{id}")
# async def delete_flight(
#     id: int,
#     req: Request,
#     authorization: Annotated[str | None, Header()] = None,
# ):
#     id = await checkAuth(req, authorization, isAirline=True)
#     request_id = req.state.request_id
#     header = {"x-api-request-id": request_id}
#     (response, status, _) = await request(
#         f"{API_FLIGHTS}/{id}", "DELETE", headers=header
#     )
#     if status < 200 or status > 204:
#         raise HTTPException(status_code=status, detail=response)
#     return response


@router.patch("/{id}", response_model=Flight)
async def update_flight(
    id: int,
@@ -1,7 +0,0 @@
# Change

In Kibana:

Stack Management > Roles > logstash_writer

Add the desired permissions for the indices that will be used; in the example code, the logs follow the `trips_*` pattern.
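For reference, the same change can be made through the Elasticsearch security API instead of the Kibana UI. A minimal sketch, assuming the cluster is reachable on `localhost:9200` and the `elastic` superuser password is in `ELASTIC_PASSWORD`; the exact privilege list is an illustrative assumption, adjust it to what the role actually needs:

```bash
# Grant the logstash_writer role write access to the trips_* indices
# (equivalent to the Stack Management > Roles steps above).
curl -X POST "http://localhost:9200/_security/role/logstash_writer" \
  -u "elastic:${ELASTIC_PASSWORD}" \
  -H "Content-Type: application/json" \
  -d '{
    "cluster": ["manage_index_templates", "monitor"],
    "indices": [
      {
        "names": ["trips_*"],
        "privileges": ["write", "create_index"]
      }
    ]
  }'
```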
@@ -1,13 +1,6 @@
---
## Default Elasticsearch configuration from Elasticsearch base image.
## https://github.com/elastic/elasticsearch/blob/main/distribution/docker/src/docker/config/elasticsearch.yml
#
cluster.name: docker-cluster
network.host: 0.0.0.0

## X-Pack settings
## see https://www.elastic.co/guide/en/elasticsearch/reference/current/security-settings.html
#
xpack.license.self_generated.type: trial
xpack.security.enabled: true
cluster.routing.allocation.disk.threshold_enabled: false
@@ -1,20 +0,0 @@
# Curator

Elasticsearch Curator helps you curate or manage your indices.

## Usage

If you want to include the Curator extension, run Docker Compose from the root of the repository with an additional
command line argument referencing the `curator-compose.yml` file:

```bash
$ docker-compose -f docker-compose.yml -f extensions/curator/curator-compose.yml up
```

This sample setup demonstrates how to run `curator` every minute using `cron`.

All configuration files are available in the `config/` directory.

## Documentation

[Curator Reference](https://www.elastic.co/guide/en/elasticsearch/client/curator/current/index.html)
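As an aside, the behaviour of the removed cron job can be approximated with a one-off `curator_cli` invocation. The sketch below is not taken from the deleted `config/` files; the `trips_` prefix, the 7-day retention, and the `elastic` credentials are assumptions:

```bash
# Dry-run deletion of trips_* indices older than 7 days, roughly what a
# cron-driven curator job would do; drop --dry-run to actually delete.
# --http_auth is needed because X-Pack security is enabled in this stack.
curator_cli --host elasticsearch --port 9200 \
  --http_auth "elastic:${ELASTIC_PASSWORD}" \
  --dry-run \
  delete_indices \
  --filter_list '[
    {"filtertype": "pattern", "kind": "prefix", "value": "trips_"},
    {"filtertype": "age", "source": "creation_date", "direction": "older", "unit": "days", "unit_count": 7}
  ]'
```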
@@ -1,41 +0,0 @@
# Heartbeat

Heartbeat is a lightweight daemon that periodically checks the status of your services and determines whether they are
available.

## Usage

**This extension requires the `heartbeat_internal` and `beats_system` users to be created and initialized with a
password.** In case you haven't done that during the initial startup of the stack, please refer to [How to re-execute
the setup][setup] to run the setup container again and initialize these users.

To include Heartbeat in the stack, run Docker Compose from the root of the repository with an additional command line
argument referencing the `heartbeat-compose.yml` file:

```console
$ docker-compose -f docker-compose.yml -f extensions/heartbeat/heartbeat-compose.yml up
```

## Configuring Heartbeat

The Heartbeat configuration is stored in [`config/heartbeat.yml`](./config/heartbeat.yml). You can modify this file
with the help of the [Configuration reference][heartbeat-config].

Any change to the Heartbeat configuration requires a restart of the Heartbeat container:

```console
$ docker-compose -f docker-compose.yml -f extensions/heartbeat/heartbeat-compose.yml restart heartbeat
```

Please refer to the following documentation page for more details about how to configure Heartbeat inside a
Docker container: [Run Heartbeat on Docker][heartbeat-docker].

## See also

[Heartbeat documentation][heartbeat-doc]

[heartbeat-config]: https://www.elastic.co/guide/en/beats/heartbeat/current/heartbeat-reference-yml.html
[heartbeat-docker]: https://www.elastic.co/guide/en/beats/heartbeat/current/running-on-docker.html
[heartbeat-doc]: https://www.elastic.co/guide/en/beats/heartbeat/current/index.html

[setup]: ../../README.md#how-to-re-execute-the-setup
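One detail worth noting from the heartbeat.yml shown further below: Heartbeat's own HTTP endpoint is enabled, so the container can be health-checked directly. A minimal sketch, assuming the default endpoint port 5066 is published to the host by the compose file:

```console
$ curl -s http://localhost:5066/stats
```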
@@ -1,7 +1,3 @@
## Heartbeat configuration
## https://github.com/elastic/beats/blob/main/deploy/docker/heartbeat.docker.yml
#

name: heartbeat

heartbeat.monitors:
@@ -19,13 +15,6 @@ heartbeat.monitors:
  hosts:
    - elasticsearch

# - type: http
#   name: dummy
#   schedule: '@every 5s'
#   check.response.status: [200]
#   urls:
#     - http://localhost:5000

processors:
  - add_cloud_metadata: ~
@@ -40,10 +29,6 @@ output.elasticsearch:
  username: heartbeat_internal
  password: ${HEARTBEAT_INTERNAL_PASSWORD}

## HTTP endpoint for health checking
## https://www.elastic.co/guide/en/beats/heartbeat/current/http-endpoint.html
#

http:
  enabled: true
  host: 0.0.0.0
@@ -1,7 +1,4 @@
---
## Default Kibana configuration from Kibana base image.
## https://github.com/elastic/kibana/blob/main/src/dev/build/tasks/os_packages/docker_generator/templates/kibana_yml.template.ts
#
server.name: marte
server.host: 0.0.0.0
elasticsearch.hosts: [ http://elasticsearch:9200 ]
@@ -9,27 +6,9 @@ elasticsearch.hosts: [ http://elasticsearch:9200 ]
monitoring.ui.container.elasticsearch.enabled: true
monitoring.ui.container.logstash.enabled: true

## X-Pack security credentials
#
elasticsearch.username: kibana_system
elasticsearch.password: ${KIBANA_SYSTEM_PASSWORD}

## Encryption keys (optional but highly recommended)
##
## Generate with either
## $ docker container run --rm docker.elastic.co/kibana/kibana:8.6.2 bin/kibana-encryption-keys generate
## $ openssl rand -hex 32
##
## https://www.elastic.co/guide/en/kibana/current/using-kibana-with-security.html
## https://www.elastic.co/guide/en/kibana/current/kibana-encryption-keys.html
#
#xpack.security.encryptionKey:
#xpack.encryptedSavedObjects.encryptionKey:
#xpack.reporting.encryptionKey:

## Fleet
## https://www.elastic.co/guide/en/kibana/current/fleet-settings-kb.html
#
xpack.fleet.agents.fleet_server.hosts: [ http://fleet-server:8220 ]

xpack.fleet.outputs:
@@ -83,8 +62,6 @@ xpack.fleet.agentPolicies:
    - name: apm-1
      package:
        name: apm
      # See the APM package manifest for a list of possible inputs.
      # https://github.com/elastic/apm-server/blob/v8.5.0/apmpackage/apm/manifest.yml#L41-L168
      inputs:
        - type: apm
          vars:
@@ -1,6 +1,5 @@
ARG ELASTIC_VERSION

# https://www.docker.elastic.co/
FROM docker.elastic.co/logstash/logstash:${ELASTIC_VERSION}

COPY config/logstash.yml /usr/share/logstash/config/logstash.yml
@@ -1,7 +1,3 @@
---
## Default Logstash configuration from Logstash base image.
## https://github.com/elastic/logstash/blob/main/docker/data/logstash/config/logstash-full.yml
#
http.host: 0.0.0.0

node.name: logstash
@@ -1 +0,0 @@
API_URL=http://localhost:5000
@@ -1,82 +0,0 @@
{
  "info": {
    "_postman_id": "5a442e31-1ca6-4662-bb2a-4a4fdcf89cb2",
    "name": "Auth",
    "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json",
    "_exporter_id": "31564770"
  },
  "item": [
    {
      "name": "Login",
      "event": [
        {
          "listen": "test",
          "script": {
            "exec": [
              "pm.test(\"Status code is 200\", function () {",
              " pm.response.to.have.status(200);",
              "});",
              "",
              "pm.test(\"Content-Type is present\", function () {",
              " pm.response.to.have.header(\"Content-Type\");",
              "});",
              ""
            ],
            "type": "text/javascript"
          }
        }
      ],
      "request": {
        "method": "POST",
        "header": [],
        "body": {
          "mode": "raw",
          "raw": "{\n \"email\": \"info@lufthansa.com\",\n \"password\": \"password1234\"\n}",
          "options": {
            "raw": {
              "language": "json"
            }
          }
        },
        "url": {
          "raw": "{{API_URL}}/auth/login",
          "host": [
            "{{API_URL}}"
          ],
          "path": [
            "auth",
            "login"
          ]
        }
      },
      "response": []
    }
  ],
  "event": [
    {
      "listen": "prerequest",
      "script": {
        "type": "text/javascript",
        "exec": [
          ""
        ]
      }
    },
    {
      "listen": "test",
      "script": {
        "type": "text/javascript",
        "exec": [
          ""
        ]
      }
    }
  ],
  "variable": [
    {
      "key": "API_URL",
      "value": "http://localhost:5001",
      "type": "string"
    }
  ]
}
@@ -1,12 +0,0 @@
FROM node:16-alpine

ENV LC_ALL="en_US.UTF-8" LANG="en_US.UTF-8" LANGUAGE="en_US.UTF-8" ALPINE_NODE_REPO="oznu/alpine-node"

RUN npm install --global newman

WORKDIR /usr/src/app

COPY . .
RUN chmod +x /usr/src/app/test.sh

ENTRYPOINT ["/usr/src/app/test.sh"]
@@ -1,9 +0,0 @@
version: '3.8'

services:
  newman:
    container_name: fids-testing_newman
    image: ${API_IMAGE}
    environment:
      - API_URL=${API_URL}
    network_mode: host
@@ -1,3 +0,0 @@
#!/bin/sh

newman run Auth.postman_collection.json
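The deleted collection defaults its `API_URL` variable to `http://localhost:5001`; when running newman outside this container, the variable could be overridden on the command line. A sketch, assuming the API is reachable locally on port 5000:

```bash
# Override the collection-level API_URL variable for a local run;
# --env-var values take precedence over collection variables.
newman run Auth.postman_collection.json \
  --env-var "API_URL=http://localhost:5000"
```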