Implement multiple tag query
parent ce9a7ed56b
commit b603972a40
@@ -45,7 +45,7 @@ pub fn main() !void {
     std.debug.print("\n\n", .{});
 
     const jsonQuery: [:0]const u8 =
-        \\{ "query" : "fur made_with:krita -fur:dusk", "limit" : 20 }
+        \\{ "query" : "fur made_with:krita date:2022", "limit" : 20 }
     ;
 
     var jquery = json.Obj.newFromString(jsonQuery);
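The test query in main() is a plain space-separated tag list. As a stand-alone sketch (not part of this commit, with an example query hard-coded), this is how such a string splits into the "tag:<tag>" selector keys that query() builds further down:

const std = @import("std");

pub fn main() !void {
    // Arena allocator, mirroring the style used in process() below.
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const allocator = arena.allocator();

    // Example query only; any space-separated tag list works the same way.
    const query_str = "fur made_with:krita date:2022";

    var iter = std.mem.split(u8, query_str, " ");
    while (iter.next()) |tag| {
        // Same "tag:<tag>" key convention as in query() below.
        const sel = try std.mem.concat(allocator, u8, &[_][]const u8{ "tag:", tag });
        std.debug.print("{s}\n", .{sel});
    }
}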
@@ -4,6 +4,7 @@ const Db = @import("Db.zig");
 const Item = @import("Item.zig");
 const Tag = @import("Tag.zig");
 const json = @import("json.zig");
+const util = @import("util.zig");
 
 pub fn process(jobj: *json.Obj, db: *Db) !void {
     var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
@@ -56,25 +57,33 @@ pub fn query(jobj: *json.Obj, db: *Db, allocator: std.mem.Allocator) !json.Obj {
     var jret = json.Obj.newArray();
 
     // Go through each tag
-    var iter = std.mem.split(u8, query_str, " ");
-    const opt_tag = iter.next();
+    var tag_iter = std.mem.split(u8, query_str, " ");
 
-    if (opt_tag) |tag| {
+    var ids = std.ArrayList([]u8).init(allocator);
+    defer {
+        for (ids.items) |item| {
+            Db.free(item.ptr);
+        }
+        ids.deinit();
+    }
+
+    while (tag_iter.next()) |tag| {
         // Get the tag selector: "tag:<tag>"
-        const tag_sel = try std.mem.concat(allocator, u8, &[_][]const u8{"tag:", tag});
+        const tag_sel = try std.mem.concat(allocator, u8, &[_][]const u8{ "tag:", tag });
         defer allocator.free(tag_sel);
 
         // Get the items that have that tag: "<item1> <item2> <item3>"
-        const tag_str = db.get(tag_sel) orelse return jret;
-        defer Db.free(tag_str.ptr);
+        try ids.append(db.get(tag_sel) orelse return jret);
 
-        // Iterate through the items id
-        var item_iter = std.mem.split(u8, tag_str, " ");
-        while (item_iter.next()) |item_id| {
-            const item = (try Item.getById(item_id, db, allocator)) orelse continue;
+        // This has been deferred before.
+        //defer Db.free(tag_str.ptr);
+    }
 
-            jret.arrayAdd(&item.toJson());
-        }
+    var id_iter = util.intersection(ids.items);
+    while (id_iter.next()) |item_id| {
+        const item = (try Item.getById(item_id, db, allocator)) orelse continue;
+
+        jret.arrayAdd(&item.toJson());
     }
 
     return jret;
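Instead of walking a single tag's item list, query() now collects one space-separated id string per tag into ids and keeps only the ids common to all of them through util.intersection, defined in the new util.zig below. A rough stand-alone sketch of that flow, with invented id strings standing in for the db.get() results:

const std = @import("std");
const util = @import("util.zig");

pub fn main() void {
    // One id string per tag, shaped like what db.get("tag:<tag>") returns.
    const ids = [_][]const u8{
        "1001 1002 1003", // e.g. ids stored under "tag:fur"
        "1002 1003 1004", // e.g. ids stored under "tag:made_with:krita"
        "1002 1005", // e.g. ids stored under "tag:date:2022"
    };

    // Only ids present in every list survive; here that is "1002".
    var id_iter = util.intersection(&ids);
    while (id_iter.next()) |item_id| {
        std.debug.print("match: {s}\n", .{item_id});
    }
}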
@@ -0,0 +1,42 @@
+const std = @import("std");
+
+
+pub const IntersectionIterator = struct {
+    ref: std.mem.TokenIterator(u8),
+    rest: []const []const u8,
+
+    /// Returns the next occurrence or null. Returned value is owned by originally passed slice
+    pub fn next(self: *IntersectionIterator) ?[]const u8 {
+        // If this returns null, there are no more items to check. We're finished.
+        const needle = self.ref.next() orelse return null;
+
+        // Search into every one of the strings
+        haystack: for (self.rest) |haystack| {
+
+            // The items are separated by spaces, iterate through those
+            var haystack_iter = std.mem.tokenize(u8, haystack, " ");
+
+            // If the word is found, it short circuits and tries the next haystack
+            while (haystack_iter.next()) |hay_blade| {
+                if (std.mem.eql(u8, hay_blade, needle)) continue :haystack;
+            }
+
+            // If this point is reached, the needle wasn't found.
+            // Try the next one
+            return self.next();
+        }
+
+        // If we got here, no non-matching haystack was found. A match!
+        return needle;
+    }
+};
+
+/// Finds the common tokens inside space separated strings
+/// and returns a slice to the first token that matches
+pub fn intersection(slices: []const []const u8) IntersectionIterator {
+    // Tokenize the first entry and save the rest as references to check
+    return IntersectionIterator{
+        .ref = std.mem.tokenize(u8, slices[0], " "),
+        .rest = slices[1..],
+    };
+}
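A small test sketch (not part of the commit) for the new iterator; it assumes the file is importable as util.zig:

const std = @import("std");
const util = @import("util.zig");

test "intersection keeps only tokens present in every slice" {
    const slices = [_][]const u8{
        "a b c d",
        "b d e",
        "d b",
    };

    // "b" and "d" are the only tokens that appear in all three strings.
    var iter = util.intersection(&slices);
    try std.testing.expectEqualStrings("b", iter.next().?);
    try std.testing.expectEqualStrings("d", iter.next().?);
    try std.testing.expect(iter.next() == null);
}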