Refactor the GeoJSON seeder to insert data in smaller batches to prevent timeouts, adding chunking logic and improved error handling. Update the permission seeder to batch-create permissions for roles and resources. Optimize the unit seeder to prepare and insert unit data in bulk for better performance. Adjust the Supabase configuration for local development and modify the migration scripts to restore GIS functions and types.
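All of the reworked seeders share the same batching idea; a minimal sketch of that pattern is shown below (illustrative only — `insertInChunks` is not part of the codebase, and the chunk size is a placeholder; the real seeders call prisma.crime_incidents.createMany, prisma.crimes.createMany, and the Supabase client directly).

// Minimal sketch of the chunked-insert pattern used by these seeders
// (hypothetical helper; not part of the committed code).
async function insertInChunks<T>(
  rows: T[],
  insert: (chunk: T[]) => Promise<unknown>,
  chunkSize = 200
): Promise<void> {
  for (let i = 0; i < rows.length; i += chunkSize) {
    // Insert one slice at a time so no single request grows large enough
    // to hit a statement or network timeout.
    await insert(rows.slice(i, i + chunkSize));
  }
}

// Example usage against a Prisma delegate:
// await insertInChunks(incidents, (chunk) =>
//   prisma.crime_incidents.createMany({ data: chunk, skipDuplicates: true })
// );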

vergiLgood1 2025-05-06 21:37:28 +07:00
parent 969d10958c
commit 57fb1e4e46
18 changed files with 1275 additions and 341 deletions

sigap-website/.vscode/extensions.json vendored Normal file

@ -0,0 +1,3 @@
{
"recommendations": ["denoland.vscode-deno"]
}


@ -1,3 +1,25 @@
{
"[typescript]": {
"editor.defaultFormatter": "denoland.vscode-deno"
},
"deno.enablePaths": [
"supabase/functions"
],
"deno.lint": true,
"deno.unstable": [
"bare-node-builtins",
"byonm",
"sloppy-imports",
"unsafe-proto",
"webgpu",
"broadcast-channel",
"worker-options",
"cron",
"kv",
"ffi",
"fs",
"http",
"net"
],
"files.autoSave": "off"
}


@ -263,7 +263,6 @@ model incident_logs {
model units {
code_unit String @id @unique @db.VarChar(20)
district_id String @db.VarChar(20)
city_id String @db.VarChar(20)
name String @db.VarChar(100)
description String?
type unit_type
@ -275,9 +274,10 @@ model units {
longitude Float
location Unsupported("geography")
phone String?
city_id String @db.VarChar(20)
unit_statistics unit_statistics[]
districts districts @relation(fields: [district_id], references: [id], onDelete: Cascade, onUpdate: NoAction)
cities cities @relation(fields: [city_id], references: [id], onDelete: Cascade, onUpdate: NoAction)
districts districts @relation(fields: [district_id], references: [id], onDelete: Cascade, onUpdate: NoAction)
@@index([name], map: "idx_units_name")
@@index([type], map: "idx_units_type")


@ -35,6 +35,10 @@ export class CrimeCategoriesSeeder {
const sheet = workbook.Sheets[workbook.SheetNames[0]];
const data = XLSX.utils.sheet_to_json(sheet) as ICrimeCategory[];
// Prepare array for batch insertion
const categoriesToCreate = [];
// Generate IDs and prepare data for batch insertion
for (const category of crimeCategoriesData) {
const newId = await generateIdWithDbCounter('crime_categories', {
prefix: 'CC',
@ -46,32 +50,41 @@ export class CrimeCategoriesSeeder {
uniquenessStrategy: 'counter',
});
await this.prisma.crime_categories.create({
data: {
id: newId.trim(),
name: category.name,
description: category.description,
},
});
console.log(`Seeding crime category: ${category.name}`);
categoriesToCreate.push({
id: newId.trim(),
name: category.name,
description: category.description,
});
}
// Update existing records
for (const row of data) {
const id = row['id'].trim();
const name = row['name'].trim();
const type = row['type'].trim();
await this.prisma.crime_categories.updateMany({
where: { id },
data: {
type,
},
});
console.log(`Updating crime category: ${name} to type ${type}`);
}
// Batch create categories
await this.prisma.crime_categories.createMany({
data: categoriesToCreate,
skipDuplicates: true,
});
console.log(`Batch created ${categoriesToCreate.length} crime categories.`);
// Prepare data for batch update
const categoriesToUpdate = data.map((row) => ({
id: row['id'].trim(),
type: row['type'].trim(),
name: row['name'].trim(),
}));
// Batch update is not directly supported by Prisma, so we'll use Promise.all with individual updates
await Promise.all(
categoriesToUpdate.map((category) =>
this.prisma.crime_categories.updateMany({
where: { id: category.id },
data: { type: category.type },
})
)
);
console.log(
`Updated types for ${categoriesToUpdate.length} crime categories.`
);
console.log(`${crimeCategoriesData.length} crime categories seeded`);
}


@ -212,6 +212,34 @@ export class CrimeIncidentsSeeder {
return points;
}
// Helper for chunked insertion
private async chunkedInsertIncidents(data: any[], chunkSize: number = 200) {
for (let i = 0; i < data.length; i += chunkSize) {
const chunk = data.slice(i, i + chunkSize);
await this.prisma.crime_incidents.createMany({
data: chunk,
skipDuplicates: true,
});
}
}
// Helper for chunked Supabase insert
private async chunkedInsertLocations(
locations: any[],
chunkSize: number = 200
) {
for (let i = 0; i < locations.length; i += chunkSize) {
const chunk = locations.slice(i, i + chunkSize);
let { error } = await this.supabase
.from('locations')
.insert(chunk)
.select();
if (error) {
throw error;
}
}
}
async run(): Promise<void> {
console.log('🌱 Seeding crime incidents data...');
@ -372,7 +400,7 @@ export class CrimeIncidentsSeeder {
const user = await this.prisma.users.findFirst({
where: {
email: 'admin@gmail.com',
email: 'sigapcompany@gmail.com',
},
select: {
id: true,
@ -414,6 +442,9 @@ export class CrimeIncidentsSeeder {
);
}
const incidentsToCreate = [];
const locationsToCreate = [];
// Create incidents based on the number_of_crime value
for (let i = 0; i < crime.number_of_crime; i++) {
// Select random category
@ -476,39 +507,9 @@ export class CrimeIncidentsSeeder {
location: `POINT(${selectedLocation.longitude} ${selectedLocation.latitude})`,
};
let { data: newLocation, error } = await this.supabase
.from('locations')
.insert([locationData])
.select();
// Add to the array instead of creating directly in the database
locationsToCreate.push(locationData);
if (error) {
console.error(
`Error inserting into locations for district ${district.name} (${crime.year}):`,
error
);
return [];
}
const location = await this.prisma.locations.findFirst({
where: {
event_id: event.id,
district_id: district.id,
address: randomAddress,
},
select: {
id: true,
address: true,
},
});
if (!location) {
console.error(
`Location not found for district ${district.name} (${crime.year}), skipping.`
);
return [];
}
// Generate a unique ID for the incident
const incidentId = await generateIdWithDbCounter(
'crime_incidents',
{
@ -527,7 +528,10 @@ export class CrimeIncidentsSeeder {
// Determine status based on crime_cleared
// If i < crimesCleared, this incident is resolved, otherwise unresolved
const status = i < crimesCleared ? 'resolved' : 'unresolved';
const status =
i < crimesCleared
? ('resolved' as crime_status)
: ('unresolved' as crime_status);
// More detailed location descriptions
const locs = [
@ -551,13 +555,13 @@ export class CrimeIncidentsSeeder {
const randomLocation = locs[Math.floor(Math.random() * locs.length)];
const descriptions = [
`Kasus ${randomCategory.name.toLowerCase()} ${location.address}`,
`Kasus ${randomCategory.name.toLowerCase()} ${randomAddress}`,
`Laporan ${randomCategory.name.toLowerCase()} terjadi pada ${timestamp} ${randomLocation}`,
`${randomCategory.name} dilaporkan ${randomLocation}`,
`Insiden ${randomCategory.name.toLowerCase()} terjadi ${randomLocation}`,
`Kejadian ${randomCategory.name.toLowerCase()} ${randomLocation}`,
`${randomCategory.name} terdeteksi ${randomLocation} pada ${timestamp.toLocaleTimeString()}`,
`Pelaporan ${randomCategory.name.toLowerCase()} di ${location.address}`,
`Pelaporan ${randomCategory.name.toLowerCase()} di ${randomAddress}`,
`Kasus ${randomCategory.name.toLowerCase()} terjadi di ${streetName}`,
`${randomCategory.name} terjadi di dekat ${placeType.toLowerCase()} ${district.name}`,
`Insiden ${randomCategory.name.toLowerCase()} dilaporkan warga setempat ${randomLocation}`,
@ -566,23 +570,67 @@ export class CrimeIncidentsSeeder {
const randomDescription =
descriptions[Math.floor(Math.random() * descriptions.length)];
// Create the crime incident
const incident = await this.prisma.crime_incidents.create({
data: {
id: incidentId,
crime_id: crime.id,
crime_category_id: randomCategory.id,
location_id: location.id,
description: randomDescription,
victim_count: 0,
status: status,
timestamp: timestamp,
},
});
incidentsCreated.push(incident);
incidentsToCreate.push({
id: incidentId,
crime_id: crime.id,
crime_category_id: randomCategory.id,
location_id: undefined as string | undefined, // This will be updated after locations are created
description: randomDescription,
victim_count: 0,
status: status,
timestamp: timestamp,
});
}
// Batch insert locations in chunks
try {
await this.chunkedInsertLocations(locationsToCreate);
} catch (error) {
console.error(
`Error inserting into locations for district ${district.name} (${crime.year}):`,
error
);
return [];
}
// Fetch all created locations for this batch
const createdLocations = await this.prisma.locations.findMany({
where: {
event_id: event.id,
district_id: district.id,
address: {
in: locationsToCreate
.map((loc) => loc.address)
.filter((address): address is string => address !== undefined),
},
},
select: {
id: true,
address: true,
},
});
// Map addresses to location IDs
const addressToId = new Map<string, string>();
for (const loc of createdLocations) {
if (loc.address !== null) {
addressToId.set(loc.address, loc.id);
}
}
// Assign location_id to each incident
for (let i = 0; i < incidentsToCreate.length; i++) {
const address = locationsToCreate[i].address;
if (typeof address === 'string') {
incidentsToCreate[i].location_id = addressToId.get(address);
}
}
// Batch insert incidents in chunks
await this.chunkedInsertIncidents(incidentsToCreate);
incidentsCreated.push(...incidentsToCreate);
return incidentsCreated;
}
}


@ -10,6 +10,22 @@ import path from 'path';
import { parse } from 'csv-parse/sync';
import { generateId, generateIdWithDbCounter } from '../../app/_utils/common';
interface ICreateUser {
id: string;
email: string;
roles_id: string;
confirmed_at: Date | null;
email_confirmed_at: Date | null;
last_sign_in_at: Date | null;
phone: string | null;
updated_at: Date | null;
created_at: Date | null;
app_metadata: any;
invited_at: Date | null;
user_metadata: any;
is_anonymous: boolean;
}
export class CrimesSeeder {
constructor(private prisma: PrismaClient) {}
@ -22,6 +38,10 @@ export class CrimesSeeder {
// Create test user
const user = await this.createUsers();
if (!user) {
throw new Error('Failed to create user');
}
// Create 5 events
const events = await this.createEvents(user);
@ -47,7 +67,7 @@ export class CrimesSeeder {
private async createUsers() {
// Check if test users already exist
const existingUser = await this.prisma.users.findFirst({
where: { email: 'admin@sigap.id' },
where: { email: 'sigapcompany@gmail.com' },
});
if (existingUser) {
@ -69,28 +89,35 @@ export class CrimesSeeder {
});
}
// Create test users
// Create test user directly with Prisma (no Supabase)
const user = await this.prisma.users.create({
const newUser = await this.prisma.users.create({
data: {
email: `admin@gmail.com`,
email: 'sigapcompany@gmail.com',
roles_id: roleId.id,
confirmed_at: new Date(),
email_confirmed_at: new Date(),
last_sign_in_at: null,
phone: null,
updated_at: new Date(),
created_at: new Date(),
app_metadata: {},
invited_at: null,
user_metadata: {},
is_anonymous: false,
profile: {
create: {
first_name: 'Admin',
last_name: 'Sigap',
username: 'admin',
username: 'adminsigap',
},
},
},
});
return user;
return newUser;
}
private async createEvents(user: users) {
private async createEvents(user: ICreateUser) {
// Check if events already exist
const existingEvent = await this.prisma.events.findFirst({
where: {
@ -114,7 +141,7 @@ export class CrimesSeeder {
return event;
}
private async createSessions(user: users, events: events) {
private async createSessions(user: ICreateUser, events: events) {
// Check if sessions already exist
const existingSession = await this.prisma.sessions.findFirst();
@ -134,6 +161,16 @@ export class CrimesSeeder {
return newSession;
}
// Helper function for chunked insertion (with default chunk size 500)
private async chunkedCreateMany(data: any[], chunkSize: number = 500) {
for (let i = 0; i < data.length; i += chunkSize) {
const chunk = data.slice(i, i + chunkSize);
await this.prisma.crimes.createMany({
data: chunk,
});
}
}
private async importMonthlyCrimeData() {
console.log('Importing monthly crime data...');
@ -160,7 +197,10 @@ export class CrimesSeeder {
// Store unique district IDs to avoid duplicates
const processedDistricts = new Set<string>();
// Import records
// Prepare batch data
const crimesData = [];
// Process records
for (const record of records) {
const crimeRate = record.level.toLowerCase() as crime_rates;
@ -193,25 +233,26 @@ export class CrimesSeeder {
/(\d{4})(?=-\d{4}$)/ // Pattern to extract the 4-digit counter
);
console.log('Creating crime ID:', crimeId);
// console.log('Creating crime ID:', crimeId);
await this.prisma.crimes.create({
data: {
id: crimeId,
district_id: record.district_id,
level: crimeRate,
method: record.method || null,
month: parseInt(record.month_num),
year: parseInt(record.year),
number_of_crime: parseInt(record.number_of_crime),
score: parseFloat(record.score),
},
});
crimesData.push({
id: crimeId,
district_id: record.district_id,
level: crimeRate,
method: record.method || null,
month: parseInt(record.month_num),
year: parseInt(record.year),
number_of_crime: parseInt(record.number_of_crime),
score: parseFloat(record.score),
});
// Keep track of unique districts for later creation of crime incidents
processedDistricts.add(record.district_id);
}
// Batch create all crimes in chunks
await this.chunkedCreateMany(crimesData);
console.log(`Imported ${records.length} monthly crime records.`);
}
@ -241,7 +282,10 @@ export class CrimesSeeder {
skip_empty_lines: true,
});
// Import records
// Prepare batch data
const crimesData = [];
// Process records
for (const record of records) {
const crimeRate = record.level.toLowerCase() as crime_rates;
const year = parseInt(record.year);
@ -261,22 +305,6 @@ export class CrimesSeeder {
continue;
}
// Create a unique ID for yearly crime data
// const crimeId = await generateId({
// prefix: 'CR',
// segments: {
// codes: [city.id],
// sequentialDigits: 4,
// year,
// },
// format: '{prefix}-{codes}-{sequence}-{year}',
// separator: '-',
// randomSequence: false,
// uniquenessStrategy: 'counter',
// storage: 'database',
// tableName: 'crimes',
// });
const crimeId = await generateIdWithDbCounter(
'crimes',
{
@ -293,20 +321,21 @@ export class CrimesSeeder {
/(\d{4})(?=-\d{4}$)/ // Pattern to extract the 4-digit counter
);
await this.prisma.crimes.create({
data: {
id: crimeId,
district_id: record.district_id,
level: crimeRate,
method: record.method || 'kmeans',
month: null,
year: year,
number_of_crime: parseInt(record.number_of_crime),
score: parseInt(record.score),
},
});
crimesData.push({
id: crimeId,
district_id: record.district_id,
level: crimeRate,
method: record.method || 'kmeans',
month: null,
year: year,
number_of_crime: parseInt(record.number_of_crime),
score: parseInt(record.score),
});
}
// Batch create all yearly crimes in chunks
await this.chunkedCreateMany(crimesData);
console.log(`Imported ${records.length} yearly crime records.`);
}
@ -336,6 +365,9 @@ export class CrimesSeeder {
skip_empty_lines: true,
});
// Prepare batch data
const crimesData = [];
for (const record of records) {
const crimeRate = record.level.toLowerCase() as crime_rates;
const districtId = record.district_id;
@ -371,20 +403,21 @@ export class CrimesSeeder {
/(\d{4})$/ // Pattern to extract the 4-digit counter at the end
);
await this.prisma.crimes.create({
data: {
id: crimeId,
district_id: districtId,
level: crimeRate,
method: 'kmeans',
month: null,
year: null,
number_of_crime: parseInt(record.crime_total),
score: parseFloat(record.avg_score),
},
});
crimesData.push({
id: crimeId,
district_id: districtId,
level: crimeRate,
method: 'kmeans',
month: null,
year: null,
number_of_crime: parseInt(record.crime_total),
score: parseFloat(record.avg_score),
});
}
// Batch create all all-year summaries in chunks
await this.chunkedCreateMany(crimesData);
console.log(`Imported ${records.length} all-year crime summaries.`);
}
}


@ -14,7 +14,6 @@ export class DemographicsSeeder {
await this.prisma.demographics.deleteMany({});
const districts = await this.prisma.districts.findMany();
// await this.prisma.$executeRaw`TRUNCATE TABLE "demographics" CASCADE`;
// Load Excel
const filePath = path.join(
@ -27,6 +26,12 @@ export class DemographicsSeeder {
let counter = 0;
// Get all district land areas in a single query at the beginning
const districtLandAreas = await this.getAllDistrictLandAreas();
// Collect demographic data to be inserted in batch
const demographicsToInsert = [];
for (const row of data) {
const districtName = String(row['Kecamatan']).trim();
const year = Number(row['Tahun']);
@ -42,28 +47,29 @@ export class DemographicsSeeder {
continue;
}
const districtLandArea = await this.getDistrictLandArea(district.id);
const districtLandArea = districtLandAreas[district.id] || 0;
const populationDensity =
districtLandArea > 0 ? population / districtLandArea : 0;
await this.prisma.demographics.create({
data: {
district_id: district.id,
year,
population,
population_density: populationDensity,
number_of_unemployed: unemployed,
},
});
demographicsToInsert.push({
district_id: district.id,
year,
population,
population_density: populationDensity,
number_of_unemployed: unemployed,
});
counter++;
console.log(
`Seeding demographic data for district: ${districtName}, year: ${year}`
);
} }
// Insert all demographic data at once
await this.prisma.demographics.createMany({
data: demographicsToInsert,
skipDuplicates: true,
});
console.log(`${counter} demographic records prepared for batch insertion`);
console.log(`${counter} demographic records seeded from Excel`);
}
@ -71,20 +77,21 @@ export class DemographicsSeeder {
return Math.random() * (max - min) + min;
}
private async getDistrictLandArea(districtId: string): Promise<number> {
const geo = await this.prisma.geographics.findFirst({
where: { district_id: districtId },
select: {
land_area: true,
},
});
if (!geo) {
console.error(`⚠️ Land area not found for district ID: ${districtId}`);
return 0;
}
return geo.land_area || 0;
}
// Get all district land areas at once to avoid multiple database queries
private async getAllDistrictLandAreas(): Promise<Record<string, number>> {
const geoData = await this.prisma.geographics.findMany({
select: {
district_id: true,
land_area: true,
}
});
const landAreas: Record<string, number> = {};
geoData.forEach(geo => {
landAreas[geo.district_id] = geo.land_area || 0;
});
return landAreas;
}
private async getCityLandArea(): Promise<number> {


@ -29,6 +29,7 @@ interface DistrictAreaData {
export class GeoJSONSeeder {
private mapboxToken: string;
private areaData: DistrictAreaData = {};
private BATCH_SIZE = 20; // Set a smaller batch size to prevent timeout
constructor(
private prisma: PrismaClient,
@ -102,6 +103,61 @@ export class GeoJSONSeeder {
return 0;
}
/**
* Split array into chunks of the specified size
*/
private chunkArray<T>(array: T[], chunkSize: number): T[][] {
const chunks = [];
for (let i = 0; i < array.length; i += chunkSize) {
chunks.push(array.slice(i, i + chunkSize));
}
return chunks;
}
/**
* Insert data in smaller batches to avoid timeout
*/
private async insertInBatches(data: any[]): Promise<void> {
const batches = this.chunkArray(data, this.BATCH_SIZE);
console.log(
`Splitting ${data.length} records into ${batches.length} batches of max ${this.BATCH_SIZE} records`
);
for (let i = 0; i < batches.length; i++) {
const batch = batches[i];
console.log(
`Processing batch ${i + 1}/${batches.length} (${batch.length} records)`
);
const { error } = await this.supabase
.from('geographics')
.insert(batch)
.select();
if (error) {
console.error(`Error inserting batch ${i + 1}:`, error);
// Optionally reduce batch size and retry for this specific batch
if (batch.length > 5) {
console.log(`Retrying batch ${i + 1} with smaller chunks...`);
await this.insertInBatches(batch); // Recursive retry with automatic smaller chunks
} else {
console.error(
`Failed to insert items even with small batch size:`,
batch
);
}
} else {
console.log(
`Successfully inserted batch ${i + 1} (${batch.length} records)`
);
}
// Add a small delay between batches to reduce database load
await new Promise((resolve) => setTimeout(resolve, 500));
}
}
async run(): Promise<void> {
console.log('Seeding GeoJSON data...');
@ -124,7 +180,6 @@ export class GeoJSONSeeder {
for (const feature of regencyGeoJson.features) {
const properties = feature.properties;
const geometry = feature.geometry;
// Cleanup code
const regencyCode = properties.kode_kk.replace(/\./g, '').trim();
@ -137,41 +192,67 @@ export class GeoJSONSeeder {
},
});
// 2. Loop over every district in the GeoJSON
for (const feature of districtGeoJson.features) {
// Prepare arrays for batch operations
const districtsToCreate = [];
const geographicsToCreate = [];
const addressPromises = [];
const years = [2020, 2021, 2022, 2023, 2024];
// 2. Process all districts first to prepare data
for (let i = 0; i < districtGeoJson.features.length; i++) {
const feature = districtGeoJson.features[i];
const properties = feature.properties;
const geometry = feature.geometry;
// Cleanup code
const districtCode = properties.kode_kec.replace(/\./g, '').trim();
// Insert District
const district = await this.prisma.districts.create({
data: {
id: districtCode,
name: properties.kecamatan,
city_id: regency.id,
},
});
// Add to districts batch
districtsToCreate.push({
id: districtCode,
name: properties.kecamatan,
city_id: regency.id,
});
console.log(`Inserted district: ${district.name}`);
// Calculate centroid for each district
// Create the Location once per district (not inside the loop)
const centroid = turf.centroid(feature);
const [longitude, latitude] = centroid.geometry.coordinates;
const address = await this.getStreetFromMapbox(longitude, latitude);
// Create address lookup promise for this district
addressPromises.push(this.getStreetFromMapbox(longitude, latitude));
}
// Insert locations for each year with appropriate land area
const years = [2020, 2021, 2022, 2023, 2024];
// 3. Insert all districts at once
await this.prisma.districts.createMany({
data: districtsToCreate,
skipDuplicates: true,
});
console.log(`Inserted ${districtsToCreate.length} districts in batch`);
// 4. Get all addresses in parallel
const addresses = await Promise.all(addressPromises);
// 5. Prepare geographic data for batch insertion
for (let i = 0; i < districtGeoJson.features.length; i++) {
const feature = districtGeoJson.features[i];
const properties = feature.properties;
const geometry = feature.geometry;
const districtCode = properties.kode_kec.replace(/\./g, '').trim();
const districtName = properties.kecamatan;
const address = addresses[i];
// Calculate centroid
const centroid = turf.centroid(feature);
const [longitude, latitude] = centroid.geometry.coordinates;
// Create geographic entries for each year
for (const year of years) {
const landArea = this.getDistrictArea(district.name, year);
// Create location data for this district and year
const locationData: CreateLocationDto = {
district_id: district.id,
description: `Location for ${district.name} District in Jember (${year})`,
const landArea = this.getDistrictArea(districtName, year);
// Add to geographics batch
geographicsToCreate.push({
district_id: districtCode,
description: `Location for ${districtName} District in Jember (${year})`,
address: address,
type: 'district location',
year: year,
@ -181,26 +262,15 @@ export class GeoJSONSeeder {
polygon: geometry,
geometry: geometry,
location: `POINT(${longitude} ${latitude})`,
};
});
const { error } = await this.supabase
.from('geographics')
.insert([locationData])
.select();
if (error) {
console.error(
`Error inserting into locations for district ${district.name} (${year}):`,
error
);
continue;
}
console.log(
`Inserted geographic data for: ${district.name} (${year}) with area: ${landArea} sq km`
);
}
}
// 6. Insert all geographic data in smaller batches
console.log(
`Preparing to insert ${geographicsToCreate.length} geographic records in smaller batches`
);
await this.insertInBatches(geographicsToCreate);
} }
console.log(


@ -1,90 +1,97 @@
import { PrismaClient } from "@prisma/client";
export class PermissionSeeder {
constructor(private prisma: PrismaClient) {}
async run(): Promise<void> {
console.log('Seeding permissions...');
// Delete existing permissions to avoid duplicates
await this.prisma.permissions.deleteMany({});
try {
// Fetch all resources and roles
const allResources = await this.prisma.resources.findMany();
const adminRole = await this.prisma.roles.findUnique({
where: { name: 'admin' },
});
const viewerRole = await this.prisma.roles.findUnique({
where: { name: 'viewer' },
});
const staffRole = await this.prisma.roles.findUnique({
where: { name: 'staff' },
});
if (!adminRole || !viewerRole || !staffRole) {
console.error('Roles not found. Please seed roles first.');
return;
}
// Admin permissions - full access to all resources
for (const resource of allResources) {
await this.createPermissions(adminRole.id, resource.id, [
'create',
'read',
'update',
'delete',
]);
}
// Viewer permissions - read-only access to all resources
for (const resource of allResources) {
await this.createPermissions(viewerRole.id, resource.id, ['read']);
}
// Staff permissions - mixed permissions based on resource
for (const resource of allResources) {
if (
['roles', 'permissions', 'resources', 'users'].includes(resource.name)
) {
// Staff can only read roles, permissions, resources, and users
await this.createPermissions(staffRole.id, resource.id, ['read']);
} else {
// Staff can create, read, update but not delete other resources
await this.createPermissions(staffRole.id, resource.id, [
'create',
'read',
'update',
]);
}
}
console.log('Permissions seeded successfully!');
} catch (error) {
console.error('Error seeding permissions:', error);
}
}
private async createPermissions(roleId: string, resourceId: string, actions: string[]) {
for (const action of actions) {
try {
const permission = await this.prisma.permissions.createMany({
data: {
action: action,
resource_id: resourceId,
role_id: roleId,
},
skipDuplicates: true, // Skip if the permission already exists
});
console.log(`Created permission: ${action} for role ${roleId} on resource ${resourceId}`);
} catch (error) {
console.error(`Error creating permission for role ${roleId} on resource ${resourceId}:`, error);
}
}
}
private async createPermissions(
roleId: string,
resourceId: string,
actions: string[]
) {
try {
// Prepare all permissions at once
const permissionsData = actions.map((action) => ({
action: action,
resource_id: resourceId,
role_id: roleId,
}));
// Create all permissions in a single batch operation
const result = await this.prisma.permissions.createMany({
data: permissionsData,
skipDuplicates: true, // Skip if the permission already exists
});
console.log(
`Created ${result.count} permissions for role ${roleId} on resource ${resourceId}: ${actions.join(', ')}`
);
} catch (error) {
console.error(
`Error creating permissions for role ${roleId} on resource ${resourceId}:`,
error
);
}
}
}


@ -79,41 +79,6 @@ export class UnitSeeder {
return;
}
const location = await this.getUnitsLocation(city.name);
if (!location) {
console.warn(`No location found for city: ${city.name}`);
return;
}
const [lng, lat] = [location.lng, location.lat];
const address = location.address;
const phone = location.telepon?.replace(/-/g, '');
const newId = await generateIdWithDbCounter('units', {
prefix: 'UT',
segments: {
sequentialDigits: 4,
},
format: '{prefix}-{sequence}',
separator: '-',
uniquenessStrategy: 'counter',
});
let locationData: CreateLocationDto = {
district_id: city.districts[0].id, // This will be replaced with Patrang's ID
city_id: city.id,
code_unit: newId,
name: `Polres ${city.name}`,
description: `Unit ${city.name} is categorized as POLRES and operates in the ${city.name} area.`,
type: 'polres',
address,
phone,
longitude: lng,
latitude: lat,
location: `POINT(${lng} ${lat})`,
};
// Find the Patrang district
const patrangDistrict = await this.prisma.districts.findFirst({
where: {
@ -127,33 +92,18 @@ export class UnitSeeder {
return;
}
locationData.district_id = patrangDistrict.id;
const { error } = await this.supabase
.from('units')
.insert([locationData])
.select();
if (error) {
console.error(`Error inserting into Supabase locations:`, error);
return;
}
let district;
for (district of districts) {
const location = await this.getUnitsLocation(district.name);
if (!location) {
console.warn(`No location found for district: ${district.name}`);
continue;
}
const [lng, lat] = [location.lng, location.lat];
const address = location.address;
const phone = location.telepon?.replace(/-/g, '');
const newId = await generateIdWithDbCounter('units', {
// Prepare arrays for batch operations
const unitsToInsert = [];
// First, get the Polres unit data
const polresLocation = await this.getUnitsLocation(city.name);
if (polresLocation) {
const [lng, lat] = [polresLocation.lng, polresLocation.lat];
const address = polresLocation.address;
const phone = polresLocation.telepon?.replace(/-/g, '');
const polresId = await generateIdWithDbCounter('units', {
prefix: 'UT',
segments: {
sequentialDigits: 4,
@ -163,7 +113,63 @@ export class UnitSeeder {
uniquenessStrategy: 'counter',
});
const locationData: CreateLocationDto = {
unitsToInsert.push({
district_id: patrangDistrict.id,
city_id: city.id,
code_unit: polresId,
name: `Polres ${city.name}`,
description: `Unit ${city.name} is categorized as POLRES and operates in the ${city.name} area.`,
type: 'polres',
address,
phone,
longitude: lng,
latitude: lat,
location: `POINT(${lng} ${lat})`,
});
} else {
console.warn(`No location found for city: ${city.name}`);
}
// Now prepare data for all Polseks
const locationPromises = districts.map((district) =>
this.getUnitsLocation(district.name)
.then((location) => ({ district, location }))
.catch(() => ({ district, location: null }))
);
// Wait for all location lookups to complete
const results = await Promise.all(locationPromises);
// Generate all IDs upfront
const idPromises = Array(results.length)
.fill(0)
.map(() =>
generateIdWithDbCounter('units', {
prefix: 'UT',
segments: {
sequentialDigits: 4,
},
format: '{prefix}-{sequence}',
separator: '-',
uniquenessStrategy: 'counter',
})
);
const ids = await Promise.all(idPromises);
// Process results and add to unitsToInsert
results.forEach(({ district, location }, index) => {
if (!location) {
console.warn(`No location found for district: ${district.name}`);
return;
}
const [lng, lat] = [location.lng, location.lat];
const address = location.address;
const phone = location.telepon?.replace(/-/g, '');
const newId = ids[index];
unitsToInsert.push({
district_id: district.id,
city_id: district.city_id,
code_unit: newId,
@ -175,21 +181,29 @@ export class UnitSeeder {
longitude: lng,
latitude: lat,
location: `POINT(${lng} ${lat})`,
};
});
console.log(
`Prepared unit data for district: ${district.name}, ID: ${newId}`
);
});
// Insert all units in a single batch operation
if (unitsToInsert.length > 0) {
const { error } = await this.supabase
.from('units')
.insert([locationData])
.select();
if (error) {
console.error(`Error inserting into Supabase locations:`, error);
continue;
}
console.log(
`Inserted unit for district: ${district.name}, newId: ${newId} at ${lng}, ${lat}`
);
}
const { error } = await this.supabase
.from('units')
.insert(unitsToInsert)
.select();
if (error) {
console.error(`Error batch inserting units into Supabase:`, error);
} else {
console.log(
`Successfully inserted ${unitsToInsert.length} units in batch`
);
}
} else {
console.warn('No unit data to insert');
}
}


@ -105,9 +105,9 @@ file_size_limit = "50MiB"
enabled = true
# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used
# in emails.
site_url = "https://127.0.0.1:3000"
site_url = "http://127.0.0.1:3000"
# A list of *exact* URLs that auth providers are permitted to redirect to post authentication.
additional_redirect_urls = []
additional_redirect_urls = ["https://127.0.0.1:3000"]
# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week).
jwt_expiry = 3600
# If disabled, the refresh token will never expire.
@ -122,7 +122,7 @@ enable_anonymous_sign_ins = false
# Allow/disallow testing manual linking of accounts
enable_manual_linking = false
# Passwords shorter than this value will be rejected as weak. Minimum 6, recommended 8 or more.
minimum_password_length = 8
minimum_password_length = 6
# Passwords that do not meet the following requirements will be rejected as weak. Supported values
# are: `letters_digits`, `lower_upper_letters_digits`, `lower_upper_letters_digits_symbols`
password_requirements = ""
@ -154,7 +154,7 @@ enable_signup = true
# addresses. If disabled, only the new email is required to confirm.
double_confirm_changes = true
# If enabled, users need to confirm their email address before signing in.
enable_confirmations = true
enable_confirmations = false
# If enabled, users will need to reauthenticate or have logged in recently to change their password.
secure_password_change = false
# Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email.
@ -162,7 +162,7 @@ max_frequency = "1s"
# Number of characters used in the email OTP.
otp_length = 6
# Number of seconds before the email OTP expires (defaults to 1 hour).
otp_expiry = 300
otp_expiry = 3600
# Use a production-ready SMTP server
# [auth.email.smtp]
@ -220,8 +220,8 @@ max_enrolled_factors = 10
# Control MFA via App Authenticator (TOTP)
[auth.mfa.totp]
enroll_enabled = true
enroll_enabled = false
verify_enabled = true
verify_enabled = false
# Configure MFA via Phone Messaging
[auth.mfa.phone]


@ -0,0 +1,7 @@
drop trigger if exists "on_auth_user_created" on "auth"."users";
drop trigger if exists "on_auth_user_deleted" on "auth"."users";
drop trigger if exists "on_auth_user_updated" on "auth"."users";


@ -0,0 +1,203 @@
-- Restore function: public.generate_username
CREATE OR REPLACE FUNCTION public.generate_username(email text)
RETURNS text
LANGUAGE plpgsql SECURITY DEFINER
AS $$
DECLARE
result_username TEXT;
username_base TEXT;
random_number INTEGER;
username_exists BOOLEAN;
BEGIN
username_base := split_part(email, '@', 1);
username_base := regexp_replace(username_base, '[^a-zA-Z0-9]', '_', 'g');
random_number := floor(random() * 9900 + 100)::integer;
result_username := username_base || random_number;
LOOP
SELECT EXISTS(SELECT 1 FROM public.profiles WHERE username = result_username) INTO username_exists;
IF NOT username_exists THEN
EXIT;
END IF;
random_number := floor(random() * 9900 + 100)::integer;
result_username := username_base || random_number;
END LOOP;
RETURN result_username;
END;
$$;
-- Restore function: gis.update_land_area
CREATE OR REPLACE FUNCTION gis.update_land_area()
RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
NEW.land_area := ROUND((ST_Area(NEW.geometry::geography) / 1000000.0)::numeric, 2);
RETURN NEW;
END;
$$;
-- Restore function: public.update_land_area
CREATE OR REPLACE FUNCTION public.update_land_area()
RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
NEW.land_area := ROUND(ST_Area(NEW.geometry::gis.geography) / 1000000.0);
RETURN NEW;
END;
$$;
-- Restore function: public.update_timestamp
CREATE OR REPLACE FUNCTION public.update_timestamp()
RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
NEW.updated_at = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$;
-- Restore function: public.handle_new_user
CREATE OR REPLACE FUNCTION public.handle_new_user()
RETURNS trigger
LANGUAGE plpgsql SECURITY DEFINER
AS $$
DECLARE
role_id UUID;
BEGIN
SELECT id INTO role_id FROM public.roles WHERE name = 'viewer' LIMIT 1;
IF role_id IS NULL THEN
RAISE EXCEPTION 'Role not found';
END IF;
INSERT INTO public.users (
id,
roles_id,
email,
phone,
encrypted_password,
invited_at,
confirmed_at,
email_confirmed_at,
recovery_sent_at,
last_sign_in_at,
app_metadata,
user_metadata,
created_at,
updated_at,
banned_until,
is_anonymous
) VALUES (
NEW.id,
role_id,
NEW.email,
NEW.phone,
NEW.encrypted_password,
NEW.invited_at,
NEW.confirmed_at,
NEW.email_confirmed_at,
NEW.recovery_sent_at,
NEW.last_sign_in_at,
NEW.raw_app_meta_data,
NEW.raw_user_meta_data,
NEW.created_at,
NEW.updated_at,
NEW.banned_until,
NEW.is_anonymous
);
INSERT INTO public.profiles (
id,
user_id,
avatar,
username,
first_name,
last_name,
bio,
address,
birth_date
) VALUES (
gen_random_uuid(),
NEW.id,
NULL,
public.generate_username(NEW.email),
NULL,
NULL,
NULL,
NULL,
NULL
);
RETURN NEW;
END;
$$;
-- Restore function: public.handle_user_delete
CREATE OR REPLACE FUNCTION public.handle_user_delete()
RETURNS trigger
LANGUAGE plpgsql SECURITY DEFINER
AS $$
BEGIN
DELETE FROM public.profiles WHERE user_id = OLD.id;
DELETE FROM public.users WHERE id = OLD.id;
RETURN OLD;
END;
$$;
-- Restore function: public.handle_user_update
CREATE OR REPLACE FUNCTION public.handle_user_update()
RETURNS trigger
LANGUAGE plpgsql SECURITY DEFINER
AS $$
BEGIN
UPDATE public.users
SET
email = COALESCE(NEW.email, email),
phone = COALESCE(NEW.phone, phone),
encrypted_password = COALESCE(NEW.encrypted_password, encrypted_password),
invited_at = COALESCE(NEW.invited_at, invited_at),
confirmed_at = COALESCE(NEW.confirmed_at, confirmed_at),
email_confirmed_at = COALESCE(NEW.email_confirmed_at, email_confirmed_at),
recovery_sent_at = COALESCE(NEW.recovery_sent_at, recovery_sent_at),
last_sign_in_at = COALESCE(NEW.last_sign_in_at, last_sign_in_at),
app_metadata = COALESCE(NEW.raw_app_meta_data, app_metadata),
user_metadata = COALESCE(NEW.raw_user_meta_data, user_metadata),
created_at = COALESCE(NEW.created_at, created_at),
updated_at = NOW(),
banned_until = CASE
WHEN NEW.banned_until IS NULL THEN NULL
ELSE COALESCE(NEW.banned_until, banned_until)
END,
is_anonymous = COALESCE(NEW.is_anonymous, is_anonymous)
WHERE id = NEW.id;
INSERT INTO public.profiles (id, user_id)
SELECT gen_random_uuid(), NEW.id
WHERE NOT EXISTS (
SELECT 1 FROM public.profiles WHERE user_id = NEW.id
)
ON CONFLICT (user_id) DO NOTHING;
RETURN NEW;
END;
$$;
-- Function: public.handle_new_user()
-- Already exists in schema, so just create trigger
CREATE TRIGGER "on_auth_user_created"
AFTER INSERT ON "auth"."users"
FOR EACH ROW
EXECUTE FUNCTION public.handle_new_user();
-- Function: public.handle_user_delete()
-- Already exists in schema, so just create trigger
CREATE TRIGGER "on_auth_user_deleted"
AFTER DELETE ON "auth"."users"
FOR EACH ROW
EXECUTE FUNCTION public.handle_user_delete();
-- Function: public.handle_user_update()
-- Already exists in schema, so just create trigger
CREATE TRIGGER "on_auth_user_updated"
AFTER UPDATE ON "auth"."users"
FOR EACH ROW
EXECUTE FUNCTION public.handle_user_update();


@ -0,0 +1,163 @@
-- drop type "gis"."geometry_dump";
-- drop type "gis"."valid_detail";
-- set check_function_bodies = off;
DROP FUNCTION IF EXISTS gis.calculate_unit_incident_distances(VARCHAR, VARCHAR);
DROP FUNCTION IF EXISTS gis.find_nearest_unit(character varying);
DROP FUNCTION IF EXISTS gis.find_units_within_distance(character varying, double precision);
-- DROP FUNCTION IF EXISTS gis.find_units_within_distance(character varying, double precision DEFAULT 5000);
CREATE OR REPLACE FUNCTION gis.calculate_unit_incident_distances(
p_unit_id VARCHAR,
p_district_id VARCHAR DEFAULT NULL
)
RETURNS TABLE (
unit_code VARCHAR,
incident_id VARCHAR,
district_name VARCHAR,
distance_meters FLOAT
) AS $$
BEGIN
RETURN QUERY
WITH unit_locations AS (
SELECT
u.code_unit,
u.district_id,
ST_SetSRID(ST_MakePoint(u.longitude, u.latitude), 4326)::gis.geography AS location
FROM
units u
WHERE
(p_unit_id IS NULL OR u.code_unit = p_unit_id)
AND (p_district_id IS NULL OR u.district_id = p_district_id)
AND u.latitude IS NOT NULL
AND u.longitude IS NOT NULL
),
incident_locations AS (
SELECT
ci.id,
ci.crime_id,
ci.crime_category_id,
ST_SetSRID(ST_MakePoint(l.longitude, l.latitude), 4326)::gis.geography AS location
FROM
crime_incidents ci
JOIN
locations l ON ci.location_id = l.id
WHERE
l.latitude IS NOT NULL
AND l.longitude IS NOT NULL
)
SELECT
ul.code_unit as unit_code,
il.id as incident_id,
d.name as district_name,
ST_Distance(ul.location, il.location) as distance_meters
FROM
unit_locations ul
JOIN
districts d ON ul.district_id = d.id
JOIN
crimes c ON c.district_id = d.id
JOIN
incident_locations il ON il.crime_id = c.id
ORDER BY
ul.code_unit,
ul.location <-> il.location;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION gis.find_nearest_unit(p_incident_id character varying)
RETURNS TABLE(unit_code character varying, unit_name character varying, distance_meters double precision)
LANGUAGE plpgsql
SECURITY DEFINER
AS $function$
BEGIN
RETURN QUERY
WITH incident_location AS (
SELECT
ci.id,
l.location AS location
FROM
crime_incidents ci
JOIN
locations l ON ci.location_id = l.id
WHERE
ci.id = p_incident_id
),
unit_locations AS (
SELECT
u.code_unit,
u.name,
u.location
FROM
units u
)
SELECT
ul.code_unit as unit_code,
ul.name as unit_name,
ST_Distance(ul.location, il.location) as distance_meters
FROM
unit_locations ul
CROSS JOIN
incident_location il
ORDER BY
ul.location <-> il.location
LIMIT 1;
END;
$function$
;
CREATE OR REPLACE FUNCTION gis.find_units_within_distance(p_incident_id character varying, p_max_distance_meters double precision DEFAULT 5000)
RETURNS TABLE(unit_code character varying, unit_name character varying, distance_meters double precision)
LANGUAGE plpgsql
SECURITY DEFINER
AS $function$
BEGIN
RETURN QUERY
WITH incident_location AS (
SELECT
ci.id,
l.location AS location
FROM
crime_incidents ci
JOIN
locations l ON ci.location_id = l.id
WHERE
ci.id = p_incident_id
),
unit_locations AS (
SELECT
u.code_unit,
u.name,
u.location
FROM
units u
)
SELECT
ul.code_unit as unit_code,
ul.name as unit_name,
ST_Distance(ul.location, il.location) as distance_meters
FROM
unit_locations ul
CROSS JOIN
incident_location il
WHERE
ST_DWithin(ul.location, il.location, p_max_distance_meters)
ORDER BY
ST_Distance(ul.location, il.location);
END;
$function$
;
-- create type "gis"."geometry_dump" as ("path" integer[], "geom" geometry);
-- create type "gis"."valid_detail" as ("valid" boolean, "reason" character varying, "location" geometry);


@ -0,0 +1,163 @@
-- drop type "gis"."geometry_dump";
-- drop type "gis"."valid_detail";
-- set check_function_bodies = off;
DROP FUNCTION IF EXISTS gis.calculate_unit_incident_distances(VARCHAR, VARCHAR);
DROP FUNCTION IF EXISTS gis.find_nearest_unit(character varying);
DROP FUNCTION IF EXISTS gis.find_units_within_distance(character varying, double precision);
-- DROP FUNCTION IF EXISTS gis.find_units_within_distance(character varying, double precision DEFAULT 5000);
CREATE OR REPLACE FUNCTION gis.calculate_unit_incident_distances(
p_unit_id VARCHAR,
p_district_id VARCHAR DEFAULT NULL
)
RETURNS TABLE (
unit_code VARCHAR,
incident_id VARCHAR,
district_name VARCHAR,
distance_meters FLOAT
) AS $$
BEGIN
RETURN QUERY
WITH unit_locations AS (
SELECT
u.code_unit,
u.district_id,
ST_SetSRID(ST_MakePoint(u.longitude, u.latitude), 4326)::gis.geography AS location
FROM
units u
WHERE
(p_unit_id IS NULL OR u.code_unit = p_unit_id)
AND (p_district_id IS NULL OR u.district_id = p_district_id)
AND u.latitude IS NOT NULL
AND u.longitude IS NOT NULL
),
incident_locations AS (
SELECT
ci.id,
ci.crime_id,
ci.crime_category_id,
ST_SetSRID(ST_MakePoint(l.longitude, l.latitude), 4326)::gis.geography AS location
FROM
crime_incidents ci
JOIN
locations l ON ci.location_id = l.id
WHERE
l.latitude IS NOT NULL
AND l.longitude IS NOT NULL
)
SELECT
ul.code_unit as unit_code,
il.id as incident_id,
d.name as district_name,
ST_Distance(ul.location, il.location) as distance_meters
FROM
unit_locations ul
JOIN
districts d ON ul.district_id = d.id
JOIN
crimes c ON c.district_id = d.id
JOIN
incident_locations il ON il.crime_id = c.id
ORDER BY
ul.code_unit,
ul.location <-> il.location;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION gis.find_nearest_unit(p_incident_id character varying)
RETURNS TABLE(unit_code character varying, unit_name character varying, distance_meters double precision)
LANGUAGE plpgsql
SECURITY DEFINER
AS $function$
BEGIN
RETURN QUERY
WITH incident_location AS (
SELECT
ci.id,
l.location AS location
FROM
crime_incidents ci
JOIN
locations l ON ci.location_id = l.id
WHERE
ci.id = p_incident_id
),
unit_locations AS (
SELECT
u.code_unit,
u.name,
u.location
FROM
units u
)
SELECT
ul.code_unit as unit_code,
ul.name as unit_name,
ST_Distance(ul.location, il.location) as distance_meters
FROM
unit_locations ul
CROSS JOIN
incident_location il
ORDER BY
ul.location <-> il.location
LIMIT 1;
END;
$function$
;
CREATE OR REPLACE FUNCTION gis.find_units_within_distance(p_incident_id character varying, p_max_distance_meters double precision DEFAULT 5000)
RETURNS TABLE(unit_code character varying, unit_name character varying, distance_meters double precision)
LANGUAGE plpgsql
SECURITY DEFINER
AS $function$
BEGIN
RETURN QUERY
WITH incident_location AS (
SELECT
ci.id,
l.location AS location
FROM
crime_incidents ci
JOIN
locations l ON ci.location_id = l.id
WHERE
ci.id = p_incident_id
),
unit_locations AS (
SELECT
u.code_unit,
u.name,
u.location
FROM
units u
)
SELECT
ul.code_unit as unit_code,
ul.name as unit_name,
ST_Distance(ul.location, il.location) as distance_meters
FROM
unit_locations ul
CROSS JOIN
incident_location il
WHERE
ST_DWithin(ul.location, il.location, p_max_distance_meters)
ORDER BY
ST_Distance(ul.location, il.location);
END;
$function$
;
-- create type "gis"."geometry_dump" as ("path" integer[], "geom" geometry);
-- create type "gis"."valid_detail" as ("valid" boolean, "reason" character varying, "location" geometry);


@ -0,0 +1,9 @@
-- drop type "gis"."geometry_dump";
-- drop type "gis"."valid_detail";
-- create type "gis"."geometry_dump" as ("path" integer[], "geom" geometry);
-- create type "gis"."valid_detail" as ("valid" boolean, "reason" character varying, "location" geometry);


@ -0,0 +1,163 @@
-- drop type "gis"."geometry_dump";
-- drop type "gis"."valid_detail";
-- set check_function_bodies = off;
DROP FUNCTION IF EXISTS gis.calculate_unit_incident_distances(VARCHAR, VARCHAR);
DROP FUNCTION IF EXISTS gis.find_nearest_unit(character varying);
DROP FUNCTION IF EXISTS gis.find_units_within_distance(character varying, double precision);
-- DROP FUNCTION IF EXISTS gis.find_units_within_distance(character varying, double precision DEFAULT 5000);
CREATE OR REPLACE FUNCTION gis.calculate_unit_incident_distances(
p_unit_id VARCHAR,
p_district_id VARCHAR DEFAULT NULL
)
RETURNS TABLE (
unit_code VARCHAR,
incident_id VARCHAR,
district_name VARCHAR,
distance_meters FLOAT
) AS $$
BEGIN
RETURN QUERY
WITH unit_locations AS (
SELECT
u.code_unit,
u.district_id,
ST_SetSRID(ST_MakePoint(u.longitude, u.latitude), 4326)::gis.geography AS location
FROM
units u
WHERE
(p_unit_id IS NULL OR u.code_unit = p_unit_id)
AND (p_district_id IS NULL OR u.district_id = p_district_id)
AND u.latitude IS NOT NULL
AND u.longitude IS NOT NULL
),
incident_locations AS (
SELECT
ci.id,
ci.crime_id,
ci.crime_category_id,
ST_SetSRID(ST_MakePoint(l.longitude, l.latitude), 4326)::gis.geography AS location
FROM
crime_incidents ci
JOIN
locations l ON ci.location_id = l.id
WHERE
l.latitude IS NOT NULL
AND l.longitude IS NOT NULL
)
SELECT
ul.code_unit as unit_code,
il.id as incident_id,
d.name as district_name,
ST_Distance(ul.location, il.location) as distance_meters
FROM
unit_locations ul
JOIN
districts d ON ul.district_id = d.id
JOIN
crimes c ON c.district_id = d.id
JOIN
incident_locations il ON il.crime_id = c.id
ORDER BY
ul.code_unit,
ul.location <-> il.location;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION gis.find_nearest_unit(p_incident_id character varying)
RETURNS TABLE(unit_code character varying, unit_name character varying, distance_meters double precision)
LANGUAGE plpgsql
SECURITY DEFINER
AS $function$
BEGIN
RETURN QUERY
WITH incident_location AS (
SELECT
ci.id,
l.location AS location
FROM
crime_incidents ci
JOIN
locations l ON ci.location_id = l.id
WHERE
ci.id = p_incident_id
),
unit_locations AS (
SELECT
u.code_unit,
u.name,
u.location
FROM
units u
)
SELECT
ul.code_unit as unit_code,
ul.name as unit_name,
ST_Distance(ul.location, il.location) as distance_meters
FROM
unit_locations ul
CROSS JOIN
incident_location il
ORDER BY
ul.location <-> il.location
LIMIT 1;
END;
$function$
;
CREATE OR REPLACE FUNCTION gis.find_units_within_distance(p_incident_id character varying, p_max_distance_meters double precision DEFAULT 5000)
RETURNS TABLE(unit_code character varying, unit_name character varying, distance_meters double precision)
LANGUAGE plpgsql
SECURITY DEFINER
AS $function$
BEGIN
RETURN QUERY
WITH incident_location AS (
SELECT
ci.id,
l.location AS location
FROM
crime_incidents ci
JOIN
locations l ON ci.location_id = l.id
WHERE
ci.id = p_incident_id
),
unit_locations AS (
SELECT
u.code_unit,
u.name,
u.location
FROM
units u
)
SELECT
ul.code_unit as unit_code,
ul.name as unit_name,
ST_Distance(ul.location, il.location) as distance_meters
FROM
unit_locations ul
CROSS JOIN
incident_location il
WHERE
ST_DWithin(ul.location, il.location, p_max_distance_meters)
ORDER BY
ST_Distance(ul.location, il.location);
END;
$function$
;
-- create type "gis"."geometry_dump" as ("path" integer[], "geom" geometry);
-- create type "gis"."valid_detail" as ("valid" boolean, "reason" character varying, "location" geometry);


@ -0,0 +1,9 @@
drop type "gis"."geometry_dump";
drop type "gis"."valid_detail";
create type "gis"."geometry_dump" as ("path" integer[], "geom" geometry);
create type "gis"."valid_detail" as ("valid" boolean, "reason" character varying, "location" geometry);