
Commit 8f9e823

chore: Rewrite - use PostgreSQL

Author: Marek Dlabacek
Committed: Nov 26, 2024
Parent: 4df8bc0

8 files changed: +429 −104 lines
 

README.md (+64 −16)
@@ -37,8 +37,12 @@ redis:
   host: "localhost"
   port: 6379
 
-sqlite:
-  db_path: "logs.db"
+postgresql:
+  host: postgres
+  port: 5432
+  dbname: mydatabase
+  user: myuser
+  password: mypassword
 
 fields:
   - http_time
@@ -47,13 +51,46 @@ fields:
 
 pause: 5
 log_level: "INFO"
+
+queries:
+  - name: top_ips_by_cpu
+    query: >
+      SELECT http_remote_addr,
+        SUM(COALESCE(CAST(http_request_time AS numeric), 0)) AS total_time
+      FROM logs
+      WHERE
+        http_time::timestamp(6) > NOW() - INTERVAL '1 hour'
+      GROUP BY http_remote_addr
+      ORDER BY total_time DESC
+      LIMIT 20
+    redis_key: analysis:top_ips_by_cpu
+  - name: top_ips_by_post
+    query: >
+      SELECT http_remote_addr, COUNT(*) AS post_count
+      FROM logs
+      WHERE http_method = 'POST'
+      GROUP BY http_remote_addr
+      ORDER BY post_count DESC
+      LIMIT 20
+    redis_key: analysis:top_ips_by_post
+  - name: top_ip_ranges
+    query: >
+      SELECT SUBSTR(http_remote_addr, 1, LENGTH(http_remote_addr) - LENGTH(REPLACE(http_remote_addr, '.', '')) - 1) AS ip_range,
+        COUNT(*) AS request_count
+      FROM logs
+      WHERE http_remote_addr LIKE '%.%.%.%'
+      GROUP BY ip_range
+      ORDER BY request_count DESC
+      LIMIT 10
+    redis_key: analysis:top_ip_ranges
 ```
 
 - `redis.host` and `redis.port`: Redis server connection details.
-- `sqlite.db_path`: Path to the SQLite database file.
+- `postgresql`: PostgreSQL server connection details.
 - `fields`: List of fields to extract from each log.
 - `pause`: Time (in seconds) to wait between processing iterations.
 - `log_level`: Logging level (`DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL`).
+- `queries`: List of SQL queries to execute and export to Redis.
 
 ---
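Each entry in `queries` is run by `analyze_logs.py`, and the result rows are written to Redis as a JSON string under the entry's `redis_key`. A minimal sketch of consuming one of those keys — assuming rows serialize as JSON arrays and that numeric aggregates arrive as stringified decimals (see the `DecimalEncoder` added in `analyze_logs.py` below):

```python
# Hedged sketch: read an exported analysis result back out of Redis.
# The key name comes from config.yml; the exact row shape is an assumption.
import json

import redis

r = redis.Redis(host="localhost", port=6379, decode_responses=True)
raw = r.get("analysis:top_ips_by_cpu")
if raw:
    for row in json.loads(raw):
        print(row)  # e.g. ["203.0.113.7", "12.3456"] -- total_time as a string
```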

@@ -63,7 +100,11 @@ Override the configuration using environment variables:
 
 - `REDIS_HOST`: Redis server host.
 - `REDIS_PORT`: Redis server port.
-- `SQLITE_DB_PATH`: Path to the SQLite database.
+- `POSTGRESQL_HOST`: PostgreSQL server host.
+- `POSTGRESQL_PORT`: PostgreSQL server port.
+- `POSTGRESQL_DBNAME`: PostgreSQL database name.
+- `POSTGRESQL_USER`: PostgreSQL username.
+- `POSTGRESQL_PASSWORD`: PostgreSQL password.
 - `FIELDS`: Comma-separated list of fields to extract.
 - `PAUSE`: Time (in seconds) to wait between processing iterations.
 - `LOG_LEVEL`: Logging level.
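Both scripts resolve these overrides with a plain `os.getenv(name, default)` fallback (see the `load_config()` diffs below). One caveat worth noting: `main.py` reads the `POSTGRESQL_*` names listed here, while `analyze_logs.py` reads `POSTGRES_*` names. A minimal sketch of the pattern:

```python
import os

# Environment wins; otherwise the value from config.yml is used.
config = {"postgresql": {"host": "postgres", "port": 5432}}  # stand-in for yaml.safe_load(...)
host = os.getenv("POSTGRESQL_HOST", config["postgresql"]["host"])
port = int(os.getenv("POSTGRESQL_PORT", config["postgresql"]["port"]))
print(host, port)
```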
@@ -82,7 +123,11 @@ python main.py [options]
 
 - `-r, --redis`: Redis server host.
 - `-p, --port`: Redis server port.
-- `-d, --db`: Path to the SQLite database.
+- `-d, --db`: PostgreSQL database name.
+- `-u, --user`: PostgreSQL username.
+- `-w, --password`: PostgreSQL password.
+- `-P, --postgres_port`: PostgreSQL port.
+- `-H, --postgres_host`: PostgreSQL host.
 - `-f, --fields`: Comma-separated list of fields to extract.
 - `-t, --time`: Time (in seconds) to wait between processing iterations.
 - `-l, --log_level`: Logging level.
@@ -94,7 +139,7 @@ python main.py [options]
 ### Example Command
 
 ```bash
-python log_collector.py -r localhost -p 6379 -d logs.db -f http_time,http_status,http_path -t 5 -l INFO
+python main.py -r localhost -p 6379 -d mydatabase -u myuser -w mypassword -P 5432 -H postgres -f http_time,http_status,http_path -t 5 -l INFO
 ```
 
 ### Running the Script
@@ -126,14 +171,6 @@ Stores the processed log data.
 | `created_at` | TIMESTAMP | Log creation timestamp.         |
 | `<fields>`   | TEXT      | Dynamic columns based on config.|
 
-### `processed_index`
-Tracks the last processed Redis index.
-
-| Column Name  | Type    | Description                   |
-|--------------|---------|-------------------------------|
-| `key_name`   | TEXT    | Redis key name (`logs`).      |
-| `last_index` | INTEGER | Last processed log index.     |
-
 ---
 
 ## Error Handling
@@ -182,8 +219,12 @@ redis:
   host: 'localhost'
   port: 6379
 
-sqlite:
-  db_path: 'logs.db'
+postgresql:
+  host: postgres
+  port: 5432
+  dbname: mydatabase
+  user: myuser
+  password: mypassword
 
 log_level: 'INFO'
 
@@ -241,3 +282,10 @@ python analyze_logs.py --dry-run
 ```
 
 This will run the analysis but not save any data to Redis.
+
+
+## Development
+- Use `docker-compose up` to start the Redis and PostgreSQL services.
+- To load fixtures, enter the redis-logger container (as root) and run `pip install faker`, then `python generate_fixtures.py` to generate logs.
+- `generate_fixtures.py` generates 1,000 fake logs and pushes them onto the Redis `logs` list.
+- The script is for testing only and deliberately requires the dev-only `faker` package, so it fails immediately if accidentally run in production.

analyze_logs.py (+36 −10)
@@ -1,10 +1,11 @@
 import argparse
 import logging
 import os
-import sqlite3
+import psycopg2
 import redis
 import json
 import yaml
+import decimal
 
 
 def load_config():
@@ -15,7 +16,11 @@ def load_config():
 
     config['REDIS_HOST'] = os.getenv('REDIS_HOST', config['redis']['host'])
     config['REDIS_PORT'] = int(os.getenv('REDIS_PORT', config['redis']['port']))
-    config['SQLITE_DB_PATH'] = os.getenv('SQLITE_DB_PATH', config['sqlite']['db_path'])
+    config['POSTGRES_HOST'] = os.getenv('POSTGRES_HOST', config['postgresql']['host'])
+    config['POSTGRES_PORT'] = int(os.getenv('POSTGRES_PORT', config['postgresql']['port']))
+    config['POSTGRES_DBNAME'] = os.getenv('POSTGRES_DBNAME', config['postgresql']['dbname'])
+    config['POSTGRES_USER'] = os.getenv('POSTGRES_USER', config['postgresql']['user'])
+    config['POSTGRES_PASSWORD'] = os.getenv('POSTGRES_PASSWORD', config['postgresql']['password'])
    config['LOG_LEVEL'] = os.getenv('LOG_LEVEL', config['log_level'])
 
     logging.info("Configuration loaded successfully.")
@@ -27,9 +32,16 @@ def connect_redis(host, port):
     return redis.Redis(host=host, port=port, decode_responses=True)
 
 
-def connect_sqlite(db_path):
-    """Connect to SQLite."""
-    return sqlite3.connect(db_path)
+def connect_postgresql(host, port, dbname, user, password):
+    """Connect to PostgreSQL."""
+    conn = psycopg2.connect(
+        host=host,
+        port=port,
+        dbname=dbname,
+        user=user,
+        password=password
+    )
+    return conn
 
 
 def execute_queries(conn, queries):
@@ -47,12 +59,17 @@ def execute_queries(conn, queries):
                 'redis_key': query_info['redis_key']
             }
             logging.info(f"Query '{name}' executed successfully.")
-        except sqlite3.Error as e:
+        except psycopg2.Error as e:
             logging.error(f"Error executing query '{name}': {e}")
             results[name] = None
 
     return results
 
+class DecimalEncoder(json.JSONEncoder):
+    def default(self, o):
+        if isinstance(o, decimal.Decimal):
+            return str(o)
+        return super().default(o)
 
 def save_results_to_redis(redis_client, results, dry_run):
     """Save query results to Redis."""
@@ -64,7 +81,10 @@ def save_results_to_redis(redis_client, results, dry_run):
         redis_key = result['redis_key']
         data = result['data']
         if not dry_run:
-            redis_client.set(redis_key, json.dumps(data))
+            try:
+                redis_client.set(redis_key, json.dumps(data, cls=DecimalEncoder))
+            except Exception as e:
+                logging.error(f"Error saving results for query '{name}' to Redis: {e}; {data}")
             logging.info(f"Results for query '{name}' saved to Redis key '{redis_key}'.")
         else:
             logging.info(f"--dry-run enabled. Results for query '{name}' not saved to Redis.")
@@ -74,7 +94,7 @@ def main():
     config = load_config()
 
     # Argument parsing
-    parser = argparse.ArgumentParser(description="Analyze logs from SQLite and save results to Redis.")
+    parser = argparse.ArgumentParser(description="Analyze logs from PostgreSQL and save results to Redis.")
     parser.add_argument('--dry-run', action='store_true', help="Perform analysis without saving to Redis.")
     args = parser.parse_args()
 
@@ -83,9 +103,15 @@ def main():
                         format='%(asctime)s - %(levelname)s - %(message)s')
     logging.info("Logging configured successfully.")
 
-    # Connect to Redis and SQLite
+    # Connect to Redis and PostgreSQL
     redis_client = connect_redis(config['REDIS_HOST'], config['REDIS_PORT'])
-    with connect_sqlite(config['SQLITE_DB_PATH']) as conn:
+    with connect_postgresql(
+        config['POSTGRES_HOST'],
+        config['POSTGRES_PORT'],
+        config['POSTGRES_DBNAME'],
+        config['POSTGRES_USER'],
+        config['POSTGRES_PASSWORD']
+    ) as conn:
         try:
             # Execute queries from config
             queries = config.get('queries', [])
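The new `DecimalEncoder` exists because psycopg2 returns PostgreSQL `NUMERIC` values (such as the `SUM(...)` aggregates in the configured queries) as `decimal.Decimal`, which the standard `json` module refuses to serialize. A self-contained demonstration of the failure and the fix:

```python
import decimal
import json

# A NUMERIC aggregate comes back from psycopg2 as decimal.Decimal.
row = ("203.0.113.7", decimal.Decimal("12.3456"))

try:
    json.dumps(row)
except TypeError as e:
    print(e)  # Object of type Decimal is not JSON serializable

class DecimalEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, decimal.Decimal):
            return str(o)  # a string avoids float rounding on the way out
        return super().default(o)

print(json.dumps(row, cls=DecimalEncoder))  # ["203.0.113.7", "12.3456"]
```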

config.yml (+16 −6)
@@ -1,9 +1,13 @@
 redis:
-  host: localhost
-  port: 16379
+  host: redis
+  port: 6379
 
-sqlite:
-  db_path: "logs.db"
+postgresql:
+  host: postgres
+  port: 5432
+  dbname: mydatabase
+  user: myuser
+  password: mypassword
 
 fields:
   - http_time
@@ -12,16 +16,22 @@ fields:
   - http_method
   - http_status
   - http_request_time
+  - http_uri
 
 pause: 5
 log_level: DEBUG
 
+# WARNING: The queries below are just examples and may not work with your data.
+# Queries must be written in PostgreSQL syntax.
+# Fields must be specified in the 'fields' section above.
 queries:
   - name: top_ips_by_cpu
     query: >
-      SELECT http_remote_addr, SUM(http_request_time) AS total_time
+      SELECT http_remote_addr,
+        SUM(COALESCE(CAST(http_request_time AS numeric), 0)) AS total_time
       FROM logs
-      WHERE http_time > datetime('now', '-1 hour')
+      WHERE
+        http_time::timestamp(6) > NOW() - INTERVAL '1 hour'
       GROUP BY http_remote_addr
       ORDER BY total_time DESC
       LIMIT 20
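The rewritten query replaces SQLite's `datetime('now', '-1 hour')` with `NOW() - INTERVAL '1 hour'` and casts the TEXT log columns before summing or comparing. A hedged sketch of running it by hand with psycopg2, assuming the docker-compose credentials below and that port 5432 is published on localhost:

```python
import psycopg2

# Assumed connection details; inside the compose network the host would be "postgres".
conn = psycopg2.connect(host="localhost", port=5432, dbname="mydatabase",
                        user="myuser", password="mypassword")
with conn, conn.cursor() as cur:  # `with conn` wraps the query in a transaction
    cur.execute("""
        SELECT http_remote_addr,
               SUM(COALESCE(CAST(http_request_time AS numeric), 0)) AS total_time
        FROM logs
        WHERE http_time::timestamp(6) > NOW() - INTERVAL '1 hour'
        GROUP BY http_remote_addr
        ORDER BY total_time DESC
        LIMIT 20
    """)
    for addr, total in cur.fetchall():
        print(addr, total)
conn.close()
```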

docker-compose.yml (+27 −0)
@@ -0,0 +1,27 @@
+services:
+  postgres:
+    image: postgres:latest
+    environment:
+      POSTGRES_DB: mydatabase
+      POSTGRES_USER: myuser
+      POSTGRES_PASSWORD: mypassword
+    ports:
+      - "5432:5432"
+    volumes:
+      - postgres-data:/var/lib/postgresql/data
+
+  redis-logger:
+    image: localhost/redis-logger:latest
+    depends_on:
+      - postgres
+      - redis
+    entrypoint: ["python", "main.py"]
+
+  redis:
+    image: redis:latest
+    ports:
+      - "6379:6379"
+
+volumes:
+  postgres-data:
+    driver: local
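Both backing services publish their ports on the host, so a quick smoke test can run from the host machine. A minimal sketch using the compose credentials above (inside the compose network the hostnames would be `postgres` and `redis` instead of `localhost`):

```python
import psycopg2
import redis

# Verify the published services are reachable after `docker-compose up`.
redis.Redis(host="localhost", port=6379).ping()
psycopg2.connect(host="localhost", port=5432, dbname="mydatabase",
                 user="myuser", password="mypassword").close()
print("redis and postgres are reachable")
```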

generate_fixtures.py (+46 −0)
@@ -0,0 +1,46 @@
+import redis
+import json
+import random
+from faker import Faker
+import time
+
+fake = Faker()
+
+def generate_fake_log():
+    """Generates a fake log with predefined fields."""
+    log = {
+        "http_time": fake.date_time_this_year().isoformat(),
+        "http_vhost": fake.domain_name(),
+        "http_remote_addr": fake.ipv4(),
+        "http_method": random.choice(["GET", "POST", "PUT", "DELETE"]),
+        "http_status": random.randint(100, 599),
+        "http_request_time": random.uniform(0.1, 5.0),
+        "http_uri": fake.uri_path()
+    }
+    return log
+
+def sanitize_json(log):
+    """Ensure the JSON is well-formed, fixing invalid fields."""
+    log_str = json.dumps(log)  # Convert to valid JSON string
+    return log_str
+
+def insert_logs_to_redis(redis_client, num_logs=1000):
+    """Inserts generated logs into Redis."""
+    for _ in range(num_logs):
+        log = generate_fake_log()
+        log_str = sanitize_json(log)
+        redis_client.rpush("logs", log_str)
+        print(f"Inserted log: {log_str}")
+        time.sleep(0.1)  # Add a small delay to avoid overwhelming Redis
+
+def connect_to_redis():
+    """Connects to Redis and returns the client."""
+    client = redis.Redis(host='redis', port=6379, decode_responses=True)
+    return client
+
+if __name__ == "__main__":
+    # Connect to Redis
+    redis_client = connect_to_redis()
+
+    # Insert 1000 fake logs into Redis
+    insert_logs_to_redis(redis_client, num_logs=1000)
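To confirm a fixture run landed, one can peek at the `logs` list that `main.py` consumes. A hedged check, assuming it runs where the `redis` hostname resolves (e.g. inside the redis-logger container):

```python
import redis

client = redis.Redis(host="redis", port=6379, decode_responses=True)
print(client.llen("logs"))          # up to 1000 entries after a full run
print(client.lrange("logs", 0, 2))  # peek at the first three raw JSON strings
```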

main.py (+106 −71)
@@ -3,7 +3,7 @@
 import logging
 import os
 import re
-import sqlite3
+import psycopg2
 import time
 import redis
 import yaml
@@ -18,7 +18,13 @@ def load_config():
     config.update({
         'REDIS_HOST': os.getenv('REDIS_HOST', config['redis']['host']),
         'REDIS_PORT': int(os.getenv('REDIS_PORT', config['redis']['port'])),
-        'SQLITE_DB_PATH': os.getenv('SQLITE_DB_PATH', config['sqlite']['db_path']),
+        'POSTGRESQL': {
+            'host': os.getenv('POSTGRESQL_HOST', config['postgresql']['host']),
+            'port': int(os.getenv('POSTGRESQL_PORT', config['postgresql']['port'])),
+            'dbname': os.getenv('POSTGRESQL_DBNAME', config['postgresql']['dbname']),
+            'user': os.getenv('POSTGRESQL_USER', config['postgresql']['user']),
+            'password': os.getenv('POSTGRESQL_PASSWORD', config['postgresql']['password'])
+        },
         'FIELDS': os.getenv('FIELDS', ','.join(config['fields'])).split(','),
         'PAUSE': int(os.getenv('PAUSE', config['pause'])),
         'LOG_LEVEL': os.getenv('LOG_LEVEL', config['log_level'])
@@ -30,89 +36,107 @@ def load_config():
 
 def parse_arguments():
     """Parses command line arguments."""
-    parser = argparse.ArgumentParser(description="Script for collecting logs from Redis to SQLite.")
+    parser = argparse.ArgumentParser(description="Script for collecting logs from Redis to PostgreSQL.")
     parser.add_argument('-r', '--redis', type=str, help="Redis server host.")
     parser.add_argument('-p', '--port', type=int, help="Redis server port.")
-    parser.add_argument('-d', '--db', type=str, help="Path to SQLite database.")
+    parser.add_argument('-d', '--db', type=str, help="PostgreSQL database name.")
+    parser.add_argument('-u', '--user', type=str, help="PostgreSQL user.")
+    parser.add_argument('-w', '--password', type=str, help="PostgreSQL password.")
+    parser.add_argument('-P', '--postgres_port', type=int, help="PostgreSQL port.")
+    parser.add_argument('-H', '--postgres_host', type=str, help="PostgreSQL host.")
     parser.add_argument('-f', '--fields', type=str, help="Comma-separated list of fields.")
     parser.add_argument('-t', '--time', type=int, help="Pause between iterations (in seconds).")
     parser.add_argument('-l', '--log_level', type=str, help="Logging level.")
     return parser.parse_args()
 
 
-def initialize_database(db_path):
-    """Initializes the SQLite database and creates a table for logs."""
-    with sqlite3.connect(db_path) as conn:
-        cursor = conn.cursor()
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS logs (
-                id INTEGER PRIMARY KEY AUTOINCREMENT,
-                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-            )
-        """)
-        conn.commit()
-        logging.info("Database initialized and table created if not exists.")
-    return db_path
-
-
-def create_dynamic_columns(db_path, fields):
-    """Creates dynamic columns for the SQLite table based on fields from the configuration."""
-    with sqlite3.connect(db_path) as conn:
-        cursor = conn.cursor()
-        for field in fields:
+def initialize_database(config):
+    """Initializes the PostgreSQL database and creates a table for logs."""
+    conn = psycopg2.connect(
+        host=config['host'],
+        port=config['port'],
+        dbname=config['dbname'],
+        user=config['user'],
+        password=config['password']
+    )
+    cursor = conn.cursor()
+    cursor.execute("""
+        CREATE TABLE IF NOT EXISTS logs (
+            id SERIAL PRIMARY KEY,
+            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+        )
+    """)
+    conn.commit()
+    logging.info("Database initialized and table created if not exists.")
+    return conn
+
+
+def create_dynamic_columns(conn, fields):
+    cursor = conn.cursor()
+    for field in fields:
+        cursor.execute(f"""
+            SELECT 1 FROM information_schema.columns
+            WHERE table_name = 'logs' AND column_name = %s
+        """, (field,))
+        if not cursor.fetchone():
             try:
                 cursor.execute(f"ALTER TABLE logs ADD COLUMN {field} TEXT")
-            except sqlite3.OperationalError as e:
-                if "duplicate column name" in str(e):
-                    logging.warning(f"Column {field} already exists.")
-                else:
-                    logging.error(f"Unexpected error: {e}")
-        conn.commit()
-        logging.info("Dynamic columns created/verified.")
+                conn.commit()
+            except psycopg2.Error as e:
+                conn.rollback()
+                logging.error(f"Error adding column {field}: {e}")
+    logging.info("Dynamic columns created/verified.")
 
 
-def clean_old_logs(db_path):
+def clean_old_logs(conn):
     """Removes logs older than 24 hours."""
-    with sqlite3.connect(db_path) as conn:
-        cursor = conn.cursor()
-        cursor.execute("DELETE FROM logs WHERE created_at < datetime('now', '-1 day')")
-        conn.commit()
-        logging.info("Old logs have been deleted.")
+    cursor = conn.cursor()
+    cursor.execute("DELETE FROM logs WHERE created_at < NOW() - INTERVAL '1 day'")
+    conn.commit()
+    logging.info("Old logs have been deleted.")
 
 
+def check_redis_connection(redis_client):
+    """Checks if the connection to Redis is working."""
+    try:
+        redis_client.ping()
+        logging.info("Connection to Redis established.")
+    except redis.exceptions.ConnectionError as e:
+        logging.error("Connection to Redis failed. Error: %s", e)
+        raise
+
 def sanitize_json(log):
     """Fixes invalid JSON by replacing empty values (e.g., "key":,) with "key": null."""
     invalid_field_pattern = r'"\w+":\s*,'
     sanitized_log = re.sub(invalid_field_pattern, lambda match: match.group(0).replace(":", ": null"), log)
     return sanitized_log
 
 
-def process_logs(db_path, redis_client, fields, batch_size=100):
-    """Processes logs from Redis and saves them to SQLite."""
-    with sqlite3.connect(db_path) as conn:
-        cursor = conn.cursor()
-        while True:
-            logs = [redis_client.lpop("logs") for _ in range(batch_size)]
-            logs = [log for log in logs if log]  # Remove None values
-            if not logs:
-                logging.info("No logs to process.")
-                break
-
-            for log in logs:
-                try:
-                    log = sanitize_json(log)
-                    parsed_log = json.loads(log)
-                except json.JSONDecodeError:
-                    logging.error(f"Error parsing log: {log}")
-                    continue
-
-                values = [parsed_log.get(field, None) for field in fields]
-                cursor.execute(f"""
-                    INSERT INTO logs ({', '.join(fields)})
-                    VALUES ({', '.join(['?'] * len(fields))})
-                """, values)
-            conn.commit()
-            logging.info("Logs saved.")
+def process_logs(conn, redis_client, fields, batch_size=100):
+    """Processes logs from Redis and saves them to PostgreSQL."""
+    cursor = conn.cursor()
+    while True:
+        logs = [redis_client.lpop("logs") for _ in range(batch_size)]
+        logs = [log for log in logs if log]  # Remove None values
+        if not logs:
+            logging.info("No logs to process.")
+            break
+
+        for log in logs:
+            try:
+                log = sanitize_json(log)
+                parsed_log = json.loads(log)
+            except json.JSONDecodeError as e:
+                logging.error(f"Error parsing log: {log}; {e}")
+                continue
+
+            values = [parsed_log.get(field, None) for field in fields]
+            cursor.execute(f"""
+                INSERT INTO logs ({', '.join(fields)})
+                VALUES ({', '.join(['%s'] * len(fields))})
+            """, values)
+        conn.commit()
+        logging.info("Logs saved.")
 
 
 def main():
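The `sanitize_json()` helper carried over from the SQLite version patches one specific malformation — an empty value such as `"key":,` — before `json.loads()` runs. A small demonstration of exactly what the regex does:

```python
import json
import re

# Same pattern as sanitize_json() in main.py.
invalid_field_pattern = r'"\w+":\s*,'
raw = '{"http_request_time":, "http_status": 200}'
fixed = re.sub(invalid_field_pattern,
               lambda m: m.group(0).replace(":", ": null"), raw)
print(fixed)              # {"http_request_time": null, "http_status": 200}
print(json.loads(fixed))  # {'http_request_time': None, 'http_status': 200}
```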
@@ -123,7 +147,13 @@ def main():
     config.update({
         'REDIS_HOST': args.redis or config['REDIS_HOST'],
         'REDIS_PORT': args.port or config['REDIS_PORT'],
-        'SQLITE_DB_PATH': args.db or config['SQLITE_DB_PATH'],
+        'POSTGRESQL': {
+            'host': args.postgres_host or config['POSTGRESQL']['host'],
+            'port': args.postgres_port or config['POSTGRESQL']['port'],
+            'dbname': args.db or config['POSTGRESQL']['dbname'],
+            'user': args.user or config['POSTGRESQL']['user'],
+            'password': args.password or config['POSTGRESQL']['password']
+        },
         'FIELDS': args.fields.split(',') if args.fields else config['FIELDS'],
         'PAUSE': args.time or config['PAUSE'],
         'LOG_LEVEL': args.log_level or config['LOG_LEVEL']
@@ -140,22 +170,27 @@ def main():
     logging.info("Connected to Redis.")
 
     try:
-        db_path = initialize_database(config['SQLITE_DB_PATH'])
-    except sqlite3.Error as e:
-        logging.error(f"Error initializing database: {e}; PATH: {config['SQLITE_DB_PATH']}")
-        logging.error(f"Owner and permissions of the file: {os.stat(config['SQLITE_DB_PATH'])}")
+        conn = initialize_database(config['POSTGRESQL'])
+    except psycopg2.Error as e:
+        logging.error(f"Error initializing database: {e}")
+        return
+
+    try:
+        check_redis_connection(redis_client)
+    except redis.exceptions.ConnectionError:
         return
-    create_dynamic_columns(db_path, config['FIELDS'])
+    create_dynamic_columns(conn, config['FIELDS'])
 
     try:
         while True:
-            process_logs(db_path, redis_client, config['FIELDS'])
-            clean_old_logs(db_path)
+            process_logs(conn, redis_client, config['FIELDS'])
+            clean_old_logs(conn)
             time.sleep(config['PAUSE'])
     except KeyboardInterrupt:
         logging.info("Script terminated by user.")
     finally:
         logging.info("Script finished.")
+        conn.close()
 
 
 if __name__ == "__main__":
poetry.lock (+128 −1)

(Generated lockfile; diff not shown.)

pyproject.toml (+6 −0)
@@ -9,7 +9,13 @@ readme = "README.md"
 python = "^3.11"
 redis = "5.2.0"
 PyYAML = "6.0.2"
+psycopg2-binary = "2.9.10"
 
 [build-system]
 requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"
+
+# dev dependencies
+[tool.poetry.dev-dependencies]
+faker = "33.0.0"
+redis = "5.2.0"
