import os
import shutil
import logging
from typing import Tuple


class FileHandler:
    """Static helpers for copying, moving, and deleting files."""

    @staticmethod
    def _validate_paths(*paths: str) -> Tuple[bool, str]:
        # Check that the parent directory of each path exists; fall back to
        # "." so bare filenames (whose dirname is empty) resolve to the cwd.
        for path in paths:
            if not os.path.exists(os.path.dirname(path) or "."):
                return False, f"Directory does not exist for path: {path}"
        return True, ""

    @staticmethod
    def copy_file(src: str, dst: str) -> str:
        """Copy src to dst, returning a human-readable status string."""
        try:
            valid, error = FileHandler._validate_paths(src, dst)
            if not valid:
                return error
            shutil.copy2(src, dst)  # copy2 preserves metadata (mtime, permissions)
            logging.info(f"Copied file from {src} to {dst}")
            return f"File successfully copied from {src} to {dst}"
        except Exception as e:
            logging.error(f"Error copying file: {e}")
            return f"Error copying file: {str(e)}"

    @staticmethod
    def move_file(src: str, dst: str) -> str:
        """Move src to dst, returning a human-readable status string."""
        try:
            valid, error = FileHandler._validate_paths(src, dst)
            if not valid:
                return error
            shutil.move(src, dst)
            logging.info(f"Moved file from {src} to {dst}")
            return f"File successfully moved from {src} to {dst}"
        except Exception as e:
            logging.error(f"Error moving file: {e}")
            return f"Error moving file: {str(e)}"

    @staticmethod
    def delete_file(filepath: str) -> str:
        """Delete filepath if it exists, returning a status string."""
        try:
            if not os.path.exists(filepath):
                return f"File does not exist: {filepath}"
            os.remove(filepath)
            logging.info(f"Deleted file {filepath}")
            return f"File {filepath} successfully deleted"
        except Exception as e:
            logging.error(f"Error deleting file: {e}")
            return f"Error deleting file: {str(e)}"


# --- Messaging infrastructure: Redis, Kafka, RabbitMQ ---
import redis
from kafka import KafkaProducer, KafkaConsumer
import pika

# Redis setup
redis_client = redis.StrictRedis(host='localhost', port=6379, db=0)

# Kafka setup
kafka_producer = KafkaProducer(bootstrap_servers='localhost:9092')
kafka_consumer = KafkaConsumer('swot-topic', bootstrap_servers='localhost:9092')

# RabbitMQ setup
rabbitmq_connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
rabbitmq_channel = rabbitmq_connection.channel()
rabbitmq_channel.queue_declare(queue='swot-queue')

# Example Kafka usage
def send_kafka_message(topic, message):
    kafka_producer.send(topic, message.encode('utf-8'))
    kafka_producer.flush()  # block until buffered messages are actually sent
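

# Illustrative counterpart to send_kafka_message (an addition, not original
# code): drain up to `limit` messages from the module-level consumer.
# Iterating a KafkaConsumer blocks until the broker delivers records.
def receive_kafka_messages(limit=1):
    messages = []
    for record in kafka_consumer:
        messages.append(record.value.decode('utf-8'))
        if len(messages) >= limit:
            break
    return messages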

# Example RabbitMQ usage
def send_rabbitmq_message(queue, message):
    rabbitmq_channel.basic_publish(exchange='', routing_key=queue, body=message)
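

# Illustrative counterpart to send_rabbitmq_message (an addition): fetch a
# single message with basic_get; returns None when the queue is empty.
def receive_rabbitmq_message(queue):
    method, properties, body = rabbitmq_channel.basic_get(queue=queue, auto_ack=True)
    return body.decode('utf-8') if body is not None else None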

# Example Redis usage
def cache_data(key, value):
    redis_client.set(key, value)


def get_cached_data(key):
    return redis_client.get(key)
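

# Illustrative variant (an addition): cache with an expiry so stale entries
# clean themselves up; redis-py's set() accepts a TTL via the `ex` argument.
def cache_data_with_ttl(key, value, ttl_seconds=300):
    redis_client.set(key, value, ex=ttl_seconds)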


# --- Storage backends: InfluxDB, PostgreSQL, immudb ---
from influxdb import InfluxDBClient
import psycopg2
from immudb.client import ImmudbClient

# InfluxDB setup
influx_client = InfluxDBClient(host='localhost', port=8086, database='swot')

# PostgreSQL setup
postgres_connection = psycopg2.connect(
    dbname='swot_db',
    user='postgres_user',
    password='postgres_password',
    host='localhost',
    port=5432
)

# Immudb setup (the client defaults to localhost:3322)
immudb_client = ImmudbClient()
immudb_client.login(username='immudb', password='immudb_password')

# Example InfluxDB usage
def write_influx_data(data):
    influx_client.write_points(data)
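

# Illustrative shape of the payload write_points() expects (InfluxDB 1.x
# Python client); the measurement, tag, and field names here are made-up
# examples, not part of the original module:
#
#     example_points = [{
#         "measurement": "water_quality",
#         "tags": {"sensor": "sensor-01"},
#         "fields": {"ph": 7.2, "turbidity": 1.4},
#     }]
#     write_influx_data(example_points)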

# Example PostgreSQL usage
def execute_postgres_query(query, params=None):
    with postgres_connection.cursor() as cursor:
        cursor.execute(query, params)
    postgres_connection.commit()
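

# Illustrative read helper (an addition): execute_postgres_query above only
# commits, so SELECT statements need a variant that returns the fetched rows.
def fetch_postgres_rows(query, params=None):
    with postgres_connection.cursor() as cursor:
        cursor.execute(query, params)
        return cursor.fetchall()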

# Example Immudb usage
def write_immutable_data(key, value):
    immudb_client.set(key.encode('utf-8'), value.encode('utf-8'))


def get_immutable_data(key):
    return immudb_client.get(key.encode('utf-8'))


# --- Distributed file storage: IPFS ---
import ipfshttpclient

# IPFS setup; ipfshttpclient expects the transport protocol in the
# multiaddr, hence the trailing '/http'.
ipfs_client = ipfshttpclient.connect('/ip4/127.0.0.1/tcp/5001/http')

# Example IPFS usage
def add_to_ipfs(file_path):
    result = ipfs_client.add(file_path)
    return result['Hash']


def get_from_ipfs(file_hash):
    return ipfs_client.cat(file_hash)
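

# Illustrative teardown (an addition, not original code): this module opens
# several long-lived connections at import time; a graceful shutdown would
# close them explicitly, e.g.:
#
#     kafka_producer.close()
#     rabbitmq_connection.close()
#     postgres_connection.close()
#     immudb_client.logout()
#     ipfs_client.close()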