site updates

projects/dev/python-prompt-crafting/main.py (new file)
@@ -0,0 +1,22 @@
import csv
import random

def read_seeds(filename):
    seeds = {}
    with open(filename, newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            category = row['Category']
            seed = row['Seed']
            if category not in seeds:
                seeds[category] = []
            seeds[category].append(seed)
    return seeds

def generate_prompt(seeds):
    prompt_parts = [random.choice(seeds[category]) for category in seeds]
    return f"A wallpaper with a {' '.join(prompt_parts)}."

# Reading seeds from a CSV file and generating a prompt
seeds = read_seeds('seeds.csv')
print(generate_prompt(seeds))

projects/dev/python-prompt-crafting/prompt-seed.csv (new file)
@@ -0,0 +1,15 @@
Category,Seed
Background,deep black background
Background,space-themed background
Background,starry night background
Lights,subtle dim lights
Lights,muted shades of lights
Lights,glowing orbs
Colors,purple
Colors,green
Colors,darker orange
Composition,centered image
Composition,harmonizing elements
Composition,evoking a sense of calm
Aspect_Ratio,21:9 aspect ratio
Aspect_Ratio,suitable for a 2x2 grid of monitors

projects/dev/python-prompt-crafting/random.md (new file)
@@ -0,0 +1,125 @@
# Midjourney Prompt Component Management System

## Executive Summary

This system provides a scalable, extensible way to manage Midjourney prompt components. It accepts structured CSV input, streamlines CRUD operations, and enforces data integrity through sanity checks, so it can adapt as requirements evolve.

## Project Scope

The project will deliver a comprehensive management system supporting a variety of Midjourney prompt components, including styles, scenes, suffixes, and parameters. It is designed to cater to a broad spectrum of artistic and descriptive elements, thereby enhancing the Midjourney user's creative process.

## Database Design and Schema

The database is compartmentalized into four principal tables:

- `prefixes`: Captures artistic styles and mediums.
  - `id`: INTEGER, PRIMARY KEY, auto-incremented.
  - `value`: VARCHAR(255), descriptive text.
  - `created_at`: TIMESTAMP, record creation timestamp.
  - `updated_at`: TIMESTAMP, record update timestamp.
- `scenes`: Encompasses various scene descriptors.
  - Columns mirror the `prefixes` table structure.
- `suffixes`: Houses additional descriptive elements.
  - Columns mirror the `prefixes` table structure.
- `parameters`: Stores Midjourney-specific parameters.
  - Columns mirror the `prefixes` table structure.

Each table's `value` column is indexed to enhance search efficiency. The schema is designed to be flexible, allowing for future expansions such as additional metadata fields.
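
The document does not prescribe exact DDL, so the following is a minimal sketch of how the `prefixes` table (and, by extension, the other three tables) could be created with `psycopg2`; the connection string and index name are placeholders, not part of the specification.

```python
import psycopg2

# Illustrative schema setup for one of the four tables; `scenes`, `suffixes`,
# and `parameters` would use the same column layout.
conn = psycopg2.connect("dbname=mydb user=myuser")  # placeholder connection settings
cur = conn.cursor()

cur.execute("""
    CREATE TABLE IF NOT EXISTS prefixes (
        id         SERIAL PRIMARY KEY,       -- auto-incremented integer
        value      VARCHAR(255) NOT NULL,    -- descriptive text
        created_at TIMESTAMP DEFAULT NOW(),  -- record creation timestamp
        updated_at TIMESTAMP DEFAULT NOW()   -- record update timestamp
    );
""")
# Index the value column to support the lookups described above.
cur.execute("CREATE INDEX IF NOT EXISTS idx_prefixes_value ON prefixes (value);")

conn.commit()
cur.close()
conn.close()
```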

## System Requirements

- PostgreSQL database server
- Python 3.x environment
- Python libraries: `pandas` for CSV file processing, `psycopg2` for PostgreSQL interaction

## CSV Processing and Database Interaction

The system parses CSV input files and validates each row before it touches the database: entries whose `value` already exists are logged and skipped rather than re-inserted, and malformed or incomplete rows are reported instead of being written, so the database content stays consistent.
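
As a rough sketch of the CSV-side validation (the required column name and the deduplication rule are assumptions for illustration, not a fixed contract):

```python
import pandas as pd

REQUIRED_COLUMNS = {"value"}  # assumed minimum set of input columns

def load_and_validate(csv_path):
    """Load a CSV file, drop malformed and duplicate rows, and return a clean DataFrame."""
    df = pd.read_csv(csv_path)

    missing = REQUIRED_COLUMNS - set(df.columns)
    if missing:
        raise ValueError(f"CSV is missing required columns: {missing}")

    # Drop rows with an empty value, then duplicates within the file itself;
    # values already present in the database are handled at insert time.
    df = df.dropna(subset=["value"])
    df = df.drop_duplicates(subset=["value"])
    return df
```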

## Logging Mechanism

Each database transaction is logged with a timestamp, the user identifier, and a description of the operation performed. This provides an audit trail and keeps changes transparent.
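
For illustration, a helper in the shape of the `log_entry` function from the reference implementation at the end of this document would append lines such as the following (the example values and resulting line are hypothetical):

```python
from datetime import datetime

def write_log(action, value, user_info=None):
    """Append one audit-log line with a timestamp, action, value, and optional user to 'log.txt'."""
    line = f"[{datetime.now()}] {action}: {value}"
    if user_info:
        line += f" by {user_info}"
    with open("log.txt", "a") as log_file:
        log_file.write(line + "\n")

# e.g. write_log("Added new entry", "deep black background", user_info="alice")
# -> [2024-01-01 12:00:00.000000] Added new entry: deep black background by alice
```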

## Error Handling

The system incorporates error-handling mechanisms to keep it stable and reliable: transactions are rolled back when a processing step fails, CSV inputs are validated against the expected format before use, and alerts are raised for any anomalies detected. Together these measures preserve data integrity and keep operations running smoothly.
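
A minimal sketch of the rollback behaviour with `psycopg2` (the table and column names are placeholders, consistent with the rest of this document):

```python
import psycopg2

def insert_value(conn, value):
    """Insert one row inside a transaction, rolling back on any failure."""
    try:
        with conn.cursor() as cur:
            cur.execute("INSERT INTO prefixes (value) VALUES (%s)", (value,))
        conn.commit()
    except psycopg2.Error as exc:
        conn.rollback()  # undo the partial transaction so the database stays consistent
        print(f"Insert failed and was rolled back: {exc}")
```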

## Security Considerations

Security is a central concern of the design, particularly around database interactions. Credentials are supplied through environment variables rather than hard-coded, queries use prepared (parameterized) statements to thwart SQL injection, and connections to the database are encrypted. These layers keep database interactions secure and uphold the system's integrity.
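
A sketch of those practices with `psycopg2` (the environment variable names follow common PostgreSQL conventions but are otherwise an assumption of this example):

```python
import os
import psycopg2

# Credentials come from the environment, never from source code.
conn = psycopg2.connect(
    dbname=os.environ["PGDATABASE"],
    user=os.environ["PGUSER"],
    password=os.environ["PGPASSWORD"],
    host=os.environ.get("PGHOST", "localhost"),
    sslmode="require",  # refuse unencrypted connections to the database
)

with conn.cursor() as cur:
    # Parameterized query: user-supplied input is never interpolated into the SQL string.
    cur.execute("SELECT id FROM prefixes WHERE value = %s", ("deep black background",))
    print(cur.fetchone())

conn.close()
```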

## Implementation Overview

- CSV file inputs are parsed to extract data.
- Database connections are established using secure protocols.
- Data is validated and reconciled with existing records to perform necessary CRUD operations.
- Operations are logged with comprehensive details for accountability.

## Conclusion

This document articulates the foundation for a system designed to streamline the management of Midjourney prompt components. It is built with an eye towards scalability, ease of use, and meticulous record-keeping. The system is poised to be an integral tool for users seeking to augment their Midjourney experience.

---

# Reference Implementation

The script below ties the pieces together: it imports the necessary Python libraries, reads the CSV file, and reconciles each row with the database, logging every operation.

```python
import pandas as pd
import psycopg2
from psycopg2.extras import RealDictCursor  # lets fetched rows be accessed by column name
from datetime import datetime


def log_entry(action, value, user_info=None, existing_data=None):
    """
    Logs actions performed by the script with timestamps to 'log.txt'.
    """
    timestamp = datetime.now()  # current time for the log entry
    log_message = f"[{timestamp}] {action}: {value}"  # base log message

    # Append user information if provided
    if user_info:
        log_message += f" by {user_info}"
    # Append existing data if available
    if existing_data:
        log_message += f". Existing data: {existing_data}"

    # Write the constructed message to the log file
    with open('log.txt', 'a') as log_file:
        log_file.write(log_message + "\n")


def process_csv_row(row, cur, conn):
    """
    Checks for the existence of the 'value' in the database and performs the
    appropriate CRUD operation based on the result.
    """
    cur.execute("SELECT * FROM mytable WHERE value = %s", (row['value'],))
    result = cur.fetchone()  # first match, returned as a dict thanks to RealDictCursor

    if result:
        # The entry already exists: log it together with its stored metadata
        log_entry("Entry exists", row['value'], existing_data=result['metadata'])
    else:
        # Otherwise insert the new value into the database and log the action
        cur.execute(
            "INSERT INTO mytable (value, metadata, created_at) VALUES (%s, %s, %s)",
            (row['value'], 'user_info', datetime.now()),
        )
        conn.commit()  # commit the change to the database
        log_entry("Added new entry", row['value'], user_info='user_info')


def main(csv_file_path):
    """
    The main execution function that reads the CSV file, processes each row,
    and interacts with the database.
    """
    df = pd.read_csv(csv_file_path)  # load the CSV file into a DataFrame
    conn = psycopg2.connect("dbname=mydb user=myuser")  # connect to the database
    # RealDictCursor makes rows behave like dicts, matching result['metadata'] above
    cur = conn.cursor(cursor_factory=RealDictCursor)

    # Iterate through the DataFrame row by row
    for index, row in df.iterrows():
        process_csv_row(row, cur, conn)  # process each row

    cur.close()  # close the cursor
    conn.close()  # close the database connection


# Run only when the script is executed directly (as opposed to being imported)
if __name__ == "__main__":
    # Call the main function with the path to the CSV file
    main("input.csv")
```

---