/**
* Gap Filler Utility
* Fills missing time series gaps in query results to ensure continuous data for charts.
* Follows the Cube.js naming convention (fillMissingDates), but the gap filling is implemented server-side.
*/
import type { SemanticQuery } from './types/query'
import type { TimeGranularity } from './types/core'
export interface GapFillerConfig {
/** The time dimension key (e.g., 'Sales.date') */
timeDimensionKey: string
/** Time granularity for bucket generation */
granularity: TimeGranularity
/** Date range [start, end] */
dateRange: [Date, Date]
/** Value to fill for missing measures (default: 0) */
fillValue: number | null
/** List of measure keys in the data */
measures: string[]
/** List of dimension keys in the data (excluding time dimensions) */
dimensions: string[]
}
/**
* Generate all time buckets for a given date range and granularity
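*
* @example
* // Illustrative sketch; the dates below are assumptions, not from the original source.
* generateTimeBuckets(new Date('2024-01-01'), new Date('2024-01-03'), 'day')
* // -> [2024-01-01T00:00:00.000Z, 2024-01-02T00:00:00.000Z, 2024-01-03T00:00:00.000Z]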
*/
export function generateTimeBuckets(
startDate: Date,
endDate: Date,
granularity: TimeGranularity
): Date[] {
const buckets: Date[] = []
let current = alignToGranularity(new Date(startDate), granularity)
const end = alignToGranularity(new Date(endDate), granularity)
// Safety: limit to prevent infinite loops (max 10,000 buckets)
const maxBuckets = 10000
while (current <= end && buckets.length < maxBuckets) {
buckets.push(new Date(current))
current = incrementByGranularity(current, granularity)
}
return buckets
}
/**
* Align a date to the start of its granularity bucket
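*
* @example
* // Illustrative only: 'week' snaps to the preceding Monday (ISO week start).
* alignToGranularity(new Date('2024-01-10T15:30:00Z'), 'week')
* // -> 2024-01-08T00:00:00.000Z (2024-01-10 is a Wednesday)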
*/
function alignToGranularity(date: Date, granularity: TimeGranularity): Date {
const aligned = new Date(date)
switch (granularity) {
case 'second':
aligned.setUTCMilliseconds(0)
break
case 'minute':
aligned.setUTCSeconds(0, 0)
break
case 'hour':
aligned.setUTCMinutes(0, 0, 0)
break
case 'day':
aligned.setUTCHours(0, 0, 0, 0)
break
case 'week': {
// Align to Monday (ISO week start)
const dayOfWeek = aligned.getUTCDay()
const daysToMonday = dayOfWeek === 0 ? 6 : dayOfWeek - 1
aligned.setUTCDate(aligned.getUTCDate() - daysToMonday)
aligned.setUTCHours(0, 0, 0, 0)
break
}
case 'month':
aligned.setUTCDate(1)
aligned.setUTCHours(0, 0, 0, 0)
break
case 'quarter': {
const quarterMonth = Math.floor(aligned.getUTCMonth() / 3) * 3
aligned.setUTCMonth(quarterMonth, 1)
aligned.setUTCHours(0, 0, 0, 0)
break
}
case 'year':
aligned.setUTCMonth(0, 1)
aligned.setUTCHours(0, 0, 0, 0)
break
}
return aligned
}
/**
* Increment a date by one granularity unit
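*
* @example
* // Illustrative only: quarters advance by three calendar months.
* incrementByGranularity(new Date('2024-01-01T00:00:00Z'), 'quarter')
* // -> 2024-04-01T00:00:00.000Z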
*/
function incrementByGranularity(date: Date, granularity: TimeGranularity): Date {
const next = new Date(date)
switch (granularity) {
case 'second':
next.setUTCSeconds(next.getUTCSeconds() + 1)
break
case 'minute':
next.setUTCMinutes(next.getUTCMinutes() + 1)
break
case 'hour':
next.setUTCHours(next.getUTCHours() + 1)
break
case 'day':
next.setUTCDate(next.getUTCDate() + 1)
break
case 'week':
next.setUTCDate(next.getUTCDate() + 7)
break
case 'month':
next.setUTCMonth(next.getUTCMonth() + 1)
break
case 'quarter':
next.setUTCMonth(next.getUTCMonth() + 3)
break
case 'year':
next.setUTCFullYear(next.getUTCFullYear() + 1)
break
}
return next
}
/**
* Normalize a date value to ISO string for consistent comparison
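*
* @example
* // Illustrative only: Date objects and parseable strings normalize to the same ISO key.
* normalizeDateKey('2024-01-01')
* // -> '2024-01-01T00:00:00.000Z'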
*/
function normalizeDateKey(value: unknown): string {
if (value instanceof Date) {
return value.toISOString()
}
if (typeof value === 'string') {
// Parse and re-format for consistency
const date = new Date(value)
if (!isNaN(date.getTime())) {
return date.toISOString()
}
}
return String(value)
}
/**
* Create a dimension group key for grouping rows by dimension values
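*
* @example
* // Illustrative only; the dimension keys below are assumptions, not from the original source.
* createDimensionGroupKey({ 'Sales.region': 'EU', 'Sales.total': 42 }, ['Sales.region'])
* // -> 'EU'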
*/
function createDimensionGroupKey(
row: Record<string, unknown>,
dimensions: string[]
): string {
if (dimensions.length === 0) {
return '__all__'
}
return dimensions.map(dim => String(row[dim] ?? '')).join('|||')
}
/**
* Fill time series gaps in query result data
*
* @param data - Original query result data
* @param config - Gap filling configuration
* @returns Data with gaps filled
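*
* @example
* // Minimal sketch; the row keys and values are assumptions, not from the original source.
* fillTimeSeriesGaps(
*   [{ 'Sales.date': '2024-01-01', 'Sales.total': 10 }],
*   {
*     timeDimensionKey: 'Sales.date',
*     granularity: 'day',
*     dateRange: [new Date('2024-01-01'), new Date('2024-01-02')],
*     fillValue: 0,
*     measures: ['Sales.total'],
*     dimensions: []
*   }
* )
* // -> keeps the 2024-01-01 row and adds { 'Sales.date': '2024-01-02T00:00:00.000Z', 'Sales.total': 0 }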
*/
export function fillTimeSeriesGaps(
data: Record<string, unknown>[],
config: GapFillerConfig
): Record<string, unknown>[] {
const { timeDimensionKey, granularity, dateRange, fillValue, measures, dimensions } = config
// Generate all expected time buckets
const timeBuckets = generateTimeBuckets(dateRange[0], dateRange[1], granularity)
if (timeBuckets.length === 0) {
return data
}
// Group data by dimension values
const dimensionGroups = new Map<string, Map<string, Record<string, unknown>>>()
for (const row of data) {
const groupKey = createDimensionGroupKey(row, dimensions)
const timeKey = normalizeDateKey(row[timeDimensionKey])
if (!dimensionGroups.has(groupKey)) {
dimensionGroups.set(groupKey, new Map())
}
dimensionGroups.get(groupKey)!.set(timeKey, row)
}
// If no data at all, create one group with no dimensions
if (dimensionGroups.size === 0 && dimensions.length === 0) {
dimensionGroups.set('__all__', new Map())
}
// Build filled result
const result: Record<string, unknown>[] = []
for (const [_groupKey, timeMap] of dimensionGroups) {
// Get a sample row from this group to extract dimension values
const sampleRow = timeMap.size > 0
? timeMap.values().next().value
: null
for (const bucket of timeBuckets) {
const bucketKey = bucket.toISOString()
const existingRow = timeMap.get(bucketKey)
if (existingRow) {
// Use existing row
result.push(existingRow)
} else {
// Create filled row
const filledRow: Record<string, unknown> = {
[timeDimensionKey]: bucketKey
}
// Copy dimension values from sample row
if (sampleRow) {
for (const dim of dimensions) {
filledRow[dim] = sampleRow[dim]
}
}
// Fill measures with fill value
for (const measure of measures) {
filledRow[measure] = fillValue
}
result.push(filledRow)
}
}
}
return result
}
/**
* Parse date range from query time dimension
* Handles both array format ['2024-01-01', '2024-01-31'] and string format
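*
* @example
* // Illustrative only:
* parseDateRange(['2024-01-01', '2024-01-31']) // -> [Date(2024-01-01), Date(2024-01-31)]
* parseDateRange('last 7 days')                // -> null (relative ranges are not parsed yet)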
*/
export function parseDateRange(dateRange: string | string[] | undefined): [Date, Date] | null {
if (!dateRange) {
return null
}
if (Array.isArray(dateRange)) {
if (dateRange.length < 2) {
return null
}
const start = new Date(dateRange[0])
const end = new Date(dateRange[1])
if (isNaN(start.getTime()) || isNaN(end.getTime())) {
return null
}
return [start, end]
}
// Handle relative date ranges (e.g., 'last 7 days', 'this week')
// For now, just try to parse as a single date string
const date = new Date(dateRange)
if (!isNaN(date.getTime())) {
// Single date - return same day range
return [date, date]
}
// Relative date strings would need more complex parsing
// For now, return null to skip gap filling
return null
}
/**
* Apply gap filling to query result data based on query configuration
*
* @param data - Original query result data
* @param query - The semantic query
* @param measures - List of measure names in the result
* @returns Data with gaps filled (if applicable)
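*
* @example
* // Minimal sketch; the query shape shown uses only the fields this function reads,
* // and the keys/values are assumptions, not from the original source.
* applyGapFilling(
*   rows,
*   {
*     timeDimensions: [{ dimension: 'Sales.date', granularity: 'day', dateRange: ['2024-01-01', '2024-01-07'] }],
*     dimensions: ['Sales.region']
*   } as SemanticQuery,
*   ['Sales.total']
* )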
*/
export function applyGapFilling(
data: Record<string, unknown>[],
query: SemanticQuery,
measures: string[]
): Record<string, unknown>[] {
// Check if we have time dimensions to fill
if (!query.timeDimensions || query.timeDimensions.length === 0) {
return data
}
// Find time dimensions that need gap filling
const timeDimensionsToFill = query.timeDimensions.filter(td => {
// fillMissingDates defaults to true
const shouldFill = td.fillMissingDates !== false
// Must have granularity and dateRange to fill gaps
const canFill = td.granularity && td.dateRange
return shouldFill && canFill
})
if (timeDimensionsToFill.length === 0) {
return data
}
// Get fill value (default: 0, but allow explicit null)
const fillValue = query.fillMissingDatesValue === undefined ? 0 : query.fillMissingDatesValue
// Get regular dimensions (exclude time dimensions)
const timeDimensionKeys = new Set(query.timeDimensions.map(td => td.dimension))
const regularDimensions = (query.dimensions || []).filter(d => !timeDimensionKeys.has(d))
// Apply gap filling for each time dimension
// Note: Currently only supports single time dimension gap filling
// Multiple time dimensions would require more complex logic
let result = data
for (const timeDim of timeDimensionsToFill) {
const dateRange = parseDateRange(timeDim.dateRange)
if (!dateRange) {
continue
}
const config: GapFillerConfig = {
timeDimensionKey: timeDim.dimension,
granularity: timeDim.granularity!,
dateRange,
fillValue,
measures,
dimensions: regularDimensions
}
result = fillTimeSeriesGaps(result, config)
}
return result
}