Commit ec066f7c authored by julian


cleaned up project: removed redundancy, renamed compose files so vscode recognizes them as compose files
parent 4d7427fa
@@ -4,9 +4,10 @@ PGDATABASE=postgres
 PGHOST=postgres
 # pg_inserter
-BATCH_SIZE=10000
+BATCH_SIZE=5000
 EDGE_INSERTS=EDGE_INSERTS
 UUID_EDGES=
+SINGLE_STATEMENT_NODES=
 # pgadmin
 PGADMIN_CONFIG_SERVER_MODE=False
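For context, a minimal sketch of how a loader like pg_insert.py might consume these settings, assuming psycopg 3 (the driver pinned in the Dockerfiles below). Only the environment variables come from this .env; the table name and row shape are hypothetical.

import os
import psycopg  # psycopg 3; connect() reads PGHOST/PGDATABASE/PGUSER/PGPASSFILE from the environment

BATCH_SIZE = int(os.environ.get("BATCH_SIZE", "5000"))

def insert_batched(rows):
    # Flush accumulated rows every BATCH_SIZE inserts ("node" is a hypothetical table)
    with psycopg.connect() as conn, conn.cursor() as cur:
        batch = []
        for row in rows:
            batch.append(row)
            if len(batch) >= BATCH_SIZE:
                cur.executemany("INSERT INTO node (uuid) VALUES (%s)", batch)
                batch.clear()
        if batch:
            cur.executemany("INSERT INTO node (uuid) VALUES (%s)", batch)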
 FROM python:3
 WORKDIR /app
 RUN pip install "psycopg[binary,pool]"
 COPY cdm_sql_queries.py queries.sql ./
 # ENV PGHOST= PGDATABASE= PGUSER= PGPASSFILE=
-CMD ["python", "-u", "cdm_sql_queries.py"]
\ No newline at end of file
+CMD ["python", "-u", "script.py"]
File moved (×8)
+services:
+  postgres:
+    volumes:
+      - ./data/theia.zip:/data.zip:ro
+      - ./postgres/initdb/01-initdb_load_data.sql:/docker-entrypoint-initdb.d/01-initdb.sql:ro
 services:
-  pg_insert:
-    build: insert
+  x-pg_base:
+    build: .
+    environment:
+      - PGHOST
+      - PGDATABASE
+      - PGUSER
+      - PGPASSFILE=/run/secrets/pgpass
+    profiles:
+      - experiment
+    secrets:
+      - pgpass
+  pg_insert:
+    extends: x-pg_base
     environment:
       - EDGE_INSERTS
       - UUID_EDGES
       - BATCH_SIZE
+      - SINGLE_STATEMENT_NODES
-      - PGPASSFILE=/run/secrets/pgpass
-    profiles:
-      - experiment
     volumes:
       - ./data/cadets.zip:/data.zip:ro
-    secrets:
-      - pgpass
+      - ./scripts/pg_insert.py:/app/script.py:ro
     depends_on:
       postgres:
         condition: service_healthy
         restart: true
   pg_query:
-    build: query
-    environment:
-      - PGHOST
-      - PGDATABASE
-      - PGUSER
-      - PGPASSFILE=/run/secrets/pgpass
-    profiles:
-      - experiment
-    secrets:
-      - pgpass
+    extends: x-pg_base
+    volumes:
+      - ./scripts/pg_query.py:/app/script.py:ro
     depends_on:
       - pg_insert
   postgres:
     build: postgres
     command: -c config_file=/postgres_conf
-    shm_size: 20gb
+    shm_size: 10gb
     hostname: $PGHOST
     environment:
       - PGUSER
@@ -50,7 +48,6 @@ services:
       start_period: 5s
       start_interval: 1s
     volumes:
-      - ./data/theia.zip:/data.zip:ro
       - ./postgres/initdb/initdb.py:/docker-entrypoint-initdb.d/initdb.py:ro
       - ./postgres/initdb/00-initdb.sql:/docker-entrypoint-initdb.d/00-initdb.sql:ro
     configs:
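Since the override files were renamed, one quick way to confirm the combined stack still resolves is `docker compose config`. A sketch, assuming it is run from the repo root with the default file set used by the experiment runner below:

import subprocess

# Validate the merged compose configuration without starting anything
# (--quiet: no output, non-zero exit code on an invalid configuration)
subprocess.run(
    ["docker", "compose",
     "-f", "compose.yml", "-f", "compose.edge-id.yml", "-f", "compose.index.yml",
     "config", "--quiet"],
    check=True,
)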
-FROM python:3
-WORKDIR /app
-RUN pip install "psycopg[binary,pool]"
-COPY pg_insert.py ./
-# ENV PGHOST= PGDATABASE= PGUSER= PGPASSFILE= BATCH_SIZE= EDGE_INSERTS= UUID_EDGES=
-CMD ["python", "-u", "pg_insert.py"]
@@ -28,56 +28,78 @@ if __name__ == "__main__":
     result_dir = "result"
     if not path.exists(result_dir):
         makedirs(result_dir)
+    compose_default = ("compose.yml", "compose.edge-id.yml", "compose.index.yml")
+    env_default = {"BATCH_SIZE": 5000}
     experiments: Sequence[dict] = (
         {
             "name": "experiment_batch1",
-            "compose_files": ("compose.yml", "edge-id.yml", "index.yml"),
+            "compose_files": compose_default,
             "env": {"BATCH_SIZE": 1},
         },
         {
             "name": "experiment_batch10",
-            "compose_files": ("compose.yml", "edge-id.yml", "index.yml"),
+            "compose_files": compose_default,
             "env": {"BATCH_SIZE": 10},
         },
         {
             "name": "experiment_batch100",
-            "compose_files": ("compose.yml", "edge-id.yml", "index.yml"),
+            "compose_files": compose_default,
             "env": {"BATCH_SIZE": 100},
         },
         {
             "name": "experiment_batch1000",
-            "compose_files": ("compose.yml", "edge-id.yml", "index.yml"),
+            "compose_files": compose_default,
             "env": {"BATCH_SIZE": 1000},
         },
         {
-            "name": "experiment_no_uuid_index",
-            "compose_files": ("compose.yml", "edge-id-no-index.yml", "index.yml"),
-            "env": {"BATCH_SIZE": 5000},
+            "name": "experiment_default",
+            "compose_files": compose_default,
+            "env": env_default,
         },
         {
-            "name": "experiment_default",
-            "compose_files": ("compose.yml", "edge-id.yml", "index.yml"),
-            "env": {"BATCH_SIZE": 5000},
+            "name": "experiment_no_uuid_index",
+            "compose_files": (
+                "compose.yml",
+                "compose.edge-id-no-index.yml",
+                "compose.index.yml",
+            ),
+            "env": env_default,
         },
         {
             "name": "experiment_hash_index",
-            "compose_files": ("compose.yml", "edge-id.yml", "index-hash.yml"),
-            "env": {"BATCH_SIZE": 5000},
+            "compose_files": (
+                "compose.yml",
+                "compose.edge-id.yml",
+                "compose.index-hash.yml",
+            ),
+            "env": env_default,
         },
         {
             "name": "experiment_multicolumn_index",
-            "compose_files": ("compose.yml", "edge-id.yml", "index-multi.yml"),
-            "env": {"BATCH_SIZE": 5000},
+            "compose_files": (
+                "compose.yml",
+                "compose.edge-id.yml",
+                "compose.index-multi.yml",
+            ),
+            "env": env_default,
         },
         {
             "name": "experiment_triggers",
-            "compose_files": ("compose.yml", "edge-id-triggers.yml", "index.yml"),
-            "env": {"BATCH_SIZE": 5000},
+            "compose_files": (
+                "compose.yml",
+                "compose.edge-id-triggers.yml",
+                "compose.index.yml",
+            ),
+            "env": env_default,
         },
         {
             "name": "experiment_uuid",
-            "compose_files": ("compose.yml", "edge-uuid.yml", "index.yml"),
-            "env": {"BATCH_SIZE": 5000},
+            "compose_files": (
+                "compose.yml",
+                "compose.edge-uuid.yml",
+                "compose.index.yml",
+            ),
+            "env": env_default,
         },
     )
+    # environ["SINGLE_STATEMENT_NODES"]="SINGLE_STATEMENT_NODES"
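How the runner turns a compose_files tuple and env dict into a compose invocation is not shown in this hunk. A hedged sketch under the assumption that it shells out to docker compose; run_experiment is a hypothetical name, not from the repo:

import os
import subprocess

def run_experiment(compose_files, env):
    cmd = ["docker", "compose"]
    for f in compose_files:
        cmd += ["-f", f]
    cmd += ["--profile", "experiment", "up", "--abort-on-container-exit"]
    # Experiment-specific variables (e.g. BATCH_SIZE) override the .env defaults
    subprocess.run(cmd, check=True,
                   env={**os.environ, **{k: str(v) for k, v in env.items()}})

run_experiment(("compose.yml", "compose.edge-id.yml", "compose.index.yml"),
               {"BATCH_SIZE": 5000})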
File moved (×2)
@@ -51,7 +51,7 @@ if __name__ == "__main__":
     if not path.exists(log_dir):
         makedirs(log_dir)
     with open(csv_path, "w") as file:
-        csv.writer(file).writerow(("TIMESTAMP", "VERTEX_COUNT"))
+        csv.writer(file).writerow(("TIME_ELAPSED", "VERTEX_COUNT"))
     info("Starting...")
     vertex_counts = timed_average(10)
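timed_average's body is not part of this diff; a minimal sketch of a loop that would produce the (TIME_ELAPSED, VERTEX_COUNT) rows written above. The query callback, CSV path, and return shape are assumptions:

import csv
import time

def timed_average(n, count_vertices=lambda: 0, csv_path="result/pg_query.csv"):
    # Append one (seconds elapsed since start, vertex count) row per iteration;
    # the header row was already written by the caller above.
    start = time.monotonic()
    counts = []
    with open(csv_path, "a", newline="") as file:
        writer = csv.writer(file)
        for _ in range(n):
            counts.append(count_vertices())  # e.g. SELECT count(*) on the node table
            writer.writerow((time.monotonic() - start, counts[-1]))
    return counts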