Skip to content

Commit d0f9837

Browse files
committed
C360 Fashion Retail demo setup with Docker Compose, Postgres schema, system instructions update and CSV data seeding
PromptQl Config file update
1 parent 281c805 commit d0f9837

File tree

4 files changed

+39
-137
lines changed

4 files changed

+39
-137
lines changed

.data/c360-fashion-retail/compose.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,6 @@ services:
2020
timeout: 5s
2121
retries: 5
2222

23-
23+
2424
volumes:
2525
postgres_data:

.data/c360-fashion-retail/postgres/1-create-retail-pg.sql

Lines changed: 0 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
1-
CREATE DATABASE retail_fashion;
2-
31
\c retail_fashion;
42

53
-- Dumped from database version 17.6 (Debian 17.6-1.pgdg13+1)
@@ -8,7 +6,6 @@ CREATE DATABASE retail_fashion;
86
SET statement_timeout = 0;
97
SET lock_timeout = 0;
108
SET idle_in_transaction_session_timeout = 0;
11-
SET transaction_timeout = 0;
129
SET client_encoding = 'UTF8';
1310
SET standard_conforming_strings = on;
1411
SELECT pg_catalog.set_config('search_path', '', false);
@@ -513,26 +510,6 @@ CREATE TABLE public.customers (
513510

514511
ALTER TABLE public.customers OWNER TO postgres;
515512

516-
--
517-
-- Name: data_compatibility_report; Type: VIEW; Schema: public; Owner: postgres
518-
--
519-
520-
CREATE VIEW public.data_compatibility_report AS
521-
SELECT assessment_type,
522-
table_name,
523-
column_name,
524-
total_rows,
525-
non_null_rows,
526-
convertible_rows,
527-
conversion_success_rate,
528-
sample_unconvertible,
529-
max_length,
530-
recommended_action
531-
FROM public.assess_data_compatibility() assess_data_compatibility(assessment_type, table_name, column_name, total_rows, non_null_rows, convertible_rows, conversion_success_rate, sample_unconvertible, max_length, recommended_action);
532-
533-
534-
ALTER VIEW public.data_compatibility_report OWNER TO postgres;
535-
536513
--
537514
-- Name: email_engagement; Type: TABLE; Schema: public; Owner: postgres
538515
--
@@ -1548,5 +1525,4 @@ ALTER TABLE ONLY public.style_similarity_matches
15481525
-- PostgreSQL database dump complete
15491526
--
15501527

1551-
\unrestrict g7bUKWIv6sLlBRnt7yPasCEbL1SUmP43hOZ92X8UWUFG248Sh2it96UBTnHVpgk
15521528

.data/c360-fashion-retail/postgres/2-import-data.sql

Lines changed: 37 additions & 74 deletions
Original file line numberDiff line numberDiff line change
@@ -43,79 +43,46 @@
4343
-- Load style similarity matches (depends on products)
4444
\COPY style_similarity_matches FROM '/docker-entrypoint-initdb.d/style_similarity_matches.csv' CSV HEADER;
4545

46-
-- Load additional tables if CSV files exist (these might be empty)
47-
-- Use conditional loading to avoid errors if files don't exist
48-
46+
-- Load optional/auxiliary CSVs if they exist (psql \COPY with ON_ERROR_STOP avoids failure)
47+
\COPY campaigns FROM '/docker-entrypoint-initdb.d/campaigns.csv' CSV HEADER;
48+
\COPY campaign_responses FROM '/docker-entrypoint-initdb.d/campaign_responses.csv' CSV HEADER;
49+
\COPY customer_service_interactions FROM '/docker-entrypoint-initdb.d/customer_service_interactions.csv' CSV HEADER;
50+
\COPY email_engagement FROM '/docker-entrypoint-initdb.d/email_engagement.csv' CSV HEADER;
51+
\COPY loyalty_activities FROM '/docker-entrypoint-initdb.d/loyalty_activities.csv' CSV HEADER;
52+
\COPY loyalty_profiles FROM '/docker-entrypoint-initdb.d/loyalty_profiles.csv' CSV HEADER;
53+
\COPY session_summary FROM '/docker-entrypoint-initdb.d/session_summary.csv' CSV HEADER;
54+
55+
-- Ensure ON CONFLICT inserts work (add unique constraint on session_summary.date if not exists)
4956
DO $$
5057
BEGIN
51-
-- Try to load campaigns
52-
BEGIN
53-
COPY campaigns FROM '/docker-entrypoint-initdb.d/campaigns.csv' CSV HEADER;
54-
EXCEPTION WHEN OTHERS THEN
55-
RAISE NOTICE 'campaigns.csv not found or empty, skipping...';
56-
END;
57-
58-
-- Try to load campaign responses
59-
BEGIN
60-
COPY campaign_responses FROM '/docker-entrypoint-initdb.d/campaign_responses.csv' CSV HEADER;
61-
EXCEPTION WHEN OTHERS THEN
62-
RAISE NOTICE 'campaign_responses.csv not found or empty, skipping...';
63-
END;
64-
65-
-- Try to load customer service interactions
66-
BEGIN
67-
COPY customer_service_interactions FROM '/docker-entrypoint-initdb.d/customer_service_interactions.csv' CSV HEADER;
68-
EXCEPTION WHEN OTHERS THEN
69-
RAISE NOTICE 'customer_service_interactions.csv not found or empty, skipping...';
70-
END;
71-
72-
-- Try to load email engagement
73-
BEGIN
74-
COPY email_engagement FROM '/docker-entrypoint-initdb.d/email_engagement.csv' CSV HEADER;
75-
EXCEPTION WHEN OTHERS THEN
76-
RAISE NOTICE 'email_engagement.csv not found or empty, skipping...';
77-
END;
78-
79-
-- Try to load loyalty activities
80-
BEGIN
81-
COPY loyalty_activities FROM '/docker-entrypoint-initdb.d/loyalty_activities.csv' CSV HEADER;
82-
EXCEPTION WHEN OTHERS THEN
83-
RAISE NOTICE 'loyalty_activities.csv not found or empty, skipping...';
84-
END;
85-
86-
-- Try to load loyalty profiles
87-
BEGIN
88-
COPY loyalty_profiles FROM '/docker-entrypoint-initdb.d/loyalty_profiles.csv' CSV HEADER;
89-
EXCEPTION WHEN OTHERS THEN
90-
RAISE NOTICE 'loyalty_profiles.csv not found or empty, skipping...';
91-
END;
92-
93-
-- Try to load session summary
94-
BEGIN
95-
COPY session_summary FROM '/docker-entrypoint-initdb.d/session_summary.csv' CSV HEADER;
96-
EXCEPTION WHEN OTHERS THEN
97-
RAISE NOTICE 'session_summary.csv not found or empty, skipping...';
98-
END;
99-
58+
IF NOT EXISTS (
59+
SELECT 1
60+
FROM pg_constraint
61+
WHERE conname = 'session_summary_date_key'
62+
) THEN
63+
EXECUTE 'ALTER TABLE session_summary ADD CONSTRAINT session_summary_date_key UNIQUE (summary_date)';
64+
END IF;
10065
END $$;
10166

10267
-- Update sequences to match the highest IDs from imported data
103-
-- Only update sequences for tables that have auto-incrementing IDs
10468
DO $$
10569
DECLARE
10670
seq_name TEXT;
107-
table_name TEXT;
71+
tbl_name TEXT;
72+
col_name TEXT;
10873
max_id INTEGER;
10974
BEGIN
110-
-- Update sequences for tables with serial columns
111-
FOR seq_name, table_name IN
112-
SELECT sequence_name, table_name
75+
-- Find each sequence and its owning table/column
76+
FOR seq_name, tbl_name, col_name IN
77+
SELECT s.sequence_name, c.table_name, c.column_name
11378
FROM information_schema.sequences s
114-
JOIN information_schema.columns c ON c.column_default LIKE '%' || s.sequence_name || '%'
79+
JOIN information_schema.columns c
80+
ON c.column_default LIKE ('%' || s.sequence_name || '%')
11581
WHERE s.sequence_schema = 'public'
11682
LOOP
117-
EXECUTE format('SELECT COALESCE(MAX(%I), 0) FROM %I',
118-
replace(seq_name, '_seq', ''), table_name) INTO max_id;
83+
EXECUTE format('SELECT COALESCE(MAX(%I), 0) FROM %I', col_name, tbl_name)
84+
INTO max_id;
85+
11986
IF max_id > 0 THEN
12087
EXECUTE format('SELECT setval(%L, %s, true)', seq_name, max_id);
12188
RAISE NOTICE 'Updated sequence % to %', seq_name, max_id;
@@ -147,17 +114,17 @@ BEGIN
147114

148115
FOR rec IN
149116
SELECT
150-
t.tablename,
117+
t.table_name,
151118
COALESCE(s.n_tup_ins, 0) as rows_loaded
152119
FROM information_schema.tables t
153-
LEFT JOIN pg_stat_user_tables s ON s.tablename = t.table_name AND s.schemaname = 'public'
120+
LEFT JOIN pg_stat_user_tables s
121+
ON s.relname = t.table_name AND s.schemaname = 'public'
154122
WHERE t.table_schema = 'public'
155-
AND t.table_type = 'BASE TABLE'
156-
-- Exclude staging and utility tables
157-
AND t.table_name NOT LIKE 'staging_%'
123+
AND t.table_type = 'BASE TABLE'
124+
AND t.table_name NOT LIKE 'staging_%'
158125
ORDER BY t.table_name
159126
LOOP
160-
RAISE NOTICE 'Table: % - Rows: %', rec.tablename, rec.rows_loaded;
127+
RAISE NOTICE 'Table: % - Rows: %', rec.table_name, rec.rows_loaded;
161128
total_tables := total_tables + 1;
162129
total_rows := total_rows + rec.rows_loaded;
163130
END LOOP;
@@ -176,17 +143,13 @@ SELECT
176143
s.last_analyze,
177144
pg_size_pretty(pg_total_relation_size('public.'||t.table_name)) as table_size
178145
FROM information_schema.tables t
179-
LEFT JOIN pg_stat_user_tables s ON s.tablename = t.table_name AND s.schemaname = 'public'
146+
LEFT JOIN pg_stat_user_tables s ON s.relname = t.table_name AND s.schemaname = 'public'
180147
WHERE t.table_schema = 'public'
181-
AND t.table_type = 'BASE TABLE'
182-
AND t.table_name NOT LIKE 'staging_%'
148+
AND t.table_type = 'BASE TABLE'
149+
AND t.table_name NOT LIKE 'staging_%'
183150
ORDER BY s.n_tup_ins DESC NULLS LAST, t.table_name;
184151

185-
-- Grant permissions (if needed)
186-
-- GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO your_app_user;
187-
-- GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public TO your_app_user;
188-
189152
ANALYZE; -- Update table statistics for query optimization
190153

191154
\echo 'C360 Retail Fashion Database loading completed successfully!'
192-
\echo 'Use: SELECT * FROM table_row_counts; to see loaded data summary'
155+
\echo 'Use: SELECT * FROM table_row_counts; to see loaded data summary'

demos/C360-fashion-retail/globals/metadata/promptql-config.hml

Lines changed: 1 addition & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -57,44 +57,7 @@ definition:
5757
- Output format confirmed (side-by-side vs long)
5858
</pre_execution_validation>
5959

60-
<mandatory_checkpoint_enforcement>
61-
**HARD STOP PROTOCOL - NO EXCEPTIONS**
62-
63-
Before executing ANY query or analysis, you MUST complete the following checkpoint sequence:
64-
65-
**CHECKPOINT 1: QUESTION CLASSIFICATION**
66-
- Type: "QUESTION CLASSIFICATION:"
67-
- Identify entity-level
68-
- If unclear, STOP and ask user for clarification.
69-
- **BLOCKING RULE**: Cannot proceed without explicit classification.
70-
71-
**CHECKPOINT 2: COMPOSITE KEY VALIDATION**
72-
- Type: "COMPOSITE KEY REQUIRED:"
73-
- Identify composite key fields relevant to classification
74-
- Type: "Composite key identified = YES"
75-
- **BLOCKING RULE**: Cannot write SQL without identifying required composite key.
76-
77-
**CHECKPOINT 3: BASE CTE CONFIRMATION**
78-
- Type: "BASE CTE STRUCTURE:"
79-
- Write: "WITH [table_name]_base AS (SELECT DISTINCT [composite_key_fields] FROM...)"
80-
- Type: "Base CTE planned = YES"
81-
- **BLOCKING RULE**: Cannot aggregate without base CTE structure.
82-
83-
**CHECKPOINT 4: AGGREGATION VALIDATION**
84-
- Type: "AGGREGATION APPROACH:"
85-
- Confirm: "Aggregating FROM base CTE, NOT from raw table"
86-
- Type: "Aggregation method validated = YES"
87-
- **BLOCKING RULE**: Cannot execute query that aggregates directly from raw table.
88-
89-
**CHECKPOINT 5: EXECUTION CLEARANCE**
90-
- Type: "EXECUTION CLEARANCE CHECKLIST:"
91-
- "✓ Classification completed"
92-
- "✓ Composite key identified"
93-
- "✓ Base CTE planned"
94-
- "✓ Aggregation validated"
95-
- Type: "CLEARED FOR EXECUTION = YES"
96-
- **BLOCKING RULE**: Cannot execute without all four checkmarks.
97-
</mandatory_checkpoint_enforcement>
60+
9861

9962
<execution_protocol>
10063
Always follow this mandatory 7-step execution method:

0 commit comments

Comments
 (0)