const std = @import("std");
const Db = @import("Db.zig");
const Item = @import("Item.zig");
const Tag = @import("Tag.zig");
const json = @import("json.zig");
const util = @import("util.zig");

/// Dispatch an incoming JSON request object to the matching action handler
/// ("add", "query" or "delete") and build a `{ "type": ..., "data": ... }`
/// response. Returns the serialized response; caller owns the returned slice
/// and must free it with `allocator`.
pub fn process(jobj: *json.Obj, db: *Db, allocator: std.mem.Allocator) ![]const u8 {
    var jret = json.Obj.newObject();
    defer jret.deinit();

    // Test the action to carry and pass the object.
    // `objectGet` returning an error here means "key not present", so the
    // empty `else |_| {}` branches are deliberate no-ops, not swallowed failures.
    // TODO: Change to { "type" : "", "data" : }
    // TODO: Automatize this, give a list of actions and autobuild the checks in comptime
    if (jobj.objectGet("add")) |*jaction| {
        var ret = try add(jaction, db, allocator);
        // TODO: Maybe change all of these to "affected"?
        jret.objectAddString("type", "added");
        jret.objectAdd("data", &ret);
    } else |_| {}

    if (jobj.objectGet("query")) |*jaction| {
        var ret = try query(jaction, db, allocator);
        jret.objectAddString("type", "queried");
        jret.objectAdd("data", &ret);
    } else |_| {}

    if (jobj.objectGet("delete")) |*jaction| {
        var ret = try delete(jaction, db, allocator);
        jret.objectAddString("type", "deleted");
        jret.objectAdd("data", &ret);
    } else |_| {}

    // Duplicate before `defer jret.deinit()` runs; `toString` presumably
    // returns a NUL-terminated buffer owned by jret — TODO confirm.
    const ret_str = std.mem.sliceTo(jret.toString(), 0);
    return allocator.dupe(u8, ret_str);
}

/// Create one item per element of the incoming JSON array, persist each to
/// the DB, and return a JSON array of the stored items.
/// Returned object is freed by the caller via `deinit`.
pub fn add(jobj: *const json.Obj, db: *Db, allocator: std.mem.Allocator) !json.Obj {
    // TODO: Maybe return error when no items in the array?
    // Freed by the caller
    var jret = json.Obj.newArray();

    var iter = jobj.arrayGetIterator();
    while (iter.next()) |*jtags| {
        var item = Item{
            .id = null,
            .tags = try Item.tagsFromJson(jtags, allocator),
        };
        // BUG FIX: the original called `item.deinit()` immediately here,
        // releasing the item (and its tags) *before* `persist` and `toJson`
        // used it — a use-after-free. Defer it so cleanup runs at the end of
        // each iteration, after the item has been stored and serialized
        // (same pattern as `query` and `delete` below).
        defer item.deinit();

        // Insert new items into the DB
        try item.persist(db, allocator);

        // Add item to new json array (Makes a deep copy, freed with jret.deinit())
        jret.arrayAdd(&item.toJson());
    }

    return jret;
}

/// Look up items matching a whitespace-separated tag query and return them
/// as a JSON array. An empty query returns every item under the "item" key.
/// Returned object is freed by the caller via `deinit`.
pub fn query(jobj: *const json.Obj, db: *Db, allocator: std.mem.Allocator) !json.Obj {
    // TODO: Have into account limits so it is scalable
    // TODO: Do not fetch EVERY id at once, iterate where possible
    const query_str = jobj.getString();

    var jret = json.Obj.newArray();

    // One DB-owned id list per tag; all released on exit.
    var ids = std.ArrayList([]u8).init(allocator);
    defer {
        for (ids.items) |item| {
            Db.free(item.ptr);
        }
        ids.deinit();
    }

    if (query_str.len > 0) {
        // Get all the items under the individual tags
        var tag_iter = std.mem.split(u8, query_str, " ");
        while (tag_iter.next()) |tag| {
            // Get the tag selector: "tag:<tag>"
            const tag_sel = try std.mem.concat(allocator, u8, &[_][]const u8{ "tag:", tag });
            defer allocator.free(tag_sel);

            // Get the items that have that tag; an unknown tag means the
            // intersection is empty, so return the (empty) result early.
            try ids.append(db.get(tag_sel) orelse return jret);
            // Freeing is handled by the deferred cleanup of `ids` above.
        }
    } else {
        try ids.append(db.get("item") orelse return jret);
    }

    // Only ids present in every per-tag list match the whole query.
    var id_iter = util.intersection(ids.items);
    while (id_iter.next()) |item_id| {
        var item = (try Item.getById(item_id, db, allocator)) orelse continue;
        defer item.deinit();

        jret.arrayAdd(&item.toJson());
    }

    return jret;
}

/// Delete the items whose ids appear in the incoming JSON array and return
/// a JSON array of the items that were removed. Ids that do not resolve to
/// an item are silently skipped.
/// Returned object is freed by the caller via `deinit`.
pub fn delete(jobj: *const json.Obj, db: *Db, allocator: std.mem.Allocator) !json.Obj {
    var jret = json.Obj.newArray();

    // Go over each id
    var id_iter = jobj.arrayGetIterator();
    while (id_iter.next()) |*jid| {
        const id = jid.getString();

        // TODO: Return some kind of error or somethign telling that some were not found
        var item = (try Item.getById(id, db, allocator)) orelse continue;
        defer item.deinit();

        // Record the item in the response before removing it from the DB.
        jret.arrayAdd(&item.toJson());

        try item.delete(db);
    }

    return jret;
}