in components/suggest/src/schema.rs [277:652]
fn upgrade_from(&self, tx: &Transaction<'_>, version: u32) -> open_database::Result<()> {
match version {
1..=15 => {
// Treat databases with these older schema versions as corrupt,
// so that they'll be replaced by a fresh, empty database with
// the current schema.
Err(open_database::Error::Corrupt)
}
16 => {
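// Create the table for dismissed suggestions, keyed on a hash of the dismissed URL.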
tx.execute(
"
CREATE TABLE dismissed_suggestions (
url_hash INTEGER PRIMARY KEY
) WITHOUT ROWID;",
(),
)?;
Ok(())
}
17 => {
// Re-create dismissed_suggestions keyed on the full URL rather than a hash.
// This needs `execute_batch` since it runs more than one SQL statement.
tx.execute_batch(
"
DROP TABLE dismissed_suggestions;
CREATE TABLE dismissed_suggestions (
url TEXT PRIMARY KEY
) WITHOUT ROWID;",
)?;
Ok(())
}
18 => {
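// Ensure dismissed_suggestions exists; `IF NOT EXISTS` makes this a no-op for
// databases where migration 17 already created it.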
tx.execute_batch(
"
CREATE TABLE IF NOT EXISTS dismissed_suggestions (
url TEXT PRIMARY KEY
) WITHOUT ROWID;",
)?;
Ok(())
}
19 => {
// Clear the database since we're going to drop the keywords tables and
// re-create them.
clear_database(tx)?;
tx.execute_batch(
"
-- Recreate the various keywords tables to drop the foreign keys.
DROP TABLE keywords;
DROP TABLE full_keywords;
DROP TABLE prefix_keywords;
CREATE TABLE keywords(
keyword TEXT NOT NULL,
suggestion_id INTEGER NOT NULL,
full_keyword_id INTEGER NULL,
rank INTEGER NOT NULL,
PRIMARY KEY (keyword, suggestion_id)
) WITHOUT ROWID;
CREATE TABLE full_keywords(
id INTEGER PRIMARY KEY,
suggestion_id INTEGER NOT NULL,
full_keyword TEXT NOT NULL
);
CREATE TABLE prefix_keywords(
keyword_prefix TEXT NOT NULL,
keyword_suffix TEXT NOT NULL DEFAULT '',
confidence INTEGER NOT NULL DEFAULT 0,
rank INTEGER NOT NULL,
suggestion_id INTEGER NOT NULL,
PRIMARY KEY (keyword_prefix, keyword_suffix, suggestion_id)
) WITHOUT ROWID;
CREATE UNIQUE INDEX keywords_suggestion_id_rank ON keywords(suggestion_id, rank);
",
)?;
Ok(())
}
// Migration for the fakespot data. This is not currently active for any users;
// it's only used in tests. It's safe to alter the fakespot_custom_details schema
// and update this migration as the project moves forward.
//
// Note: if we want to add a regular migration while the fakespot code is still behind
// a feature flag, insert it before this one and make fakespot the last migration.
20 => {
tx.execute_batch(
"
CREATE TABLE fakespot_custom_details(
suggestion_id INTEGER PRIMARY KEY,
fakespot_grade TEXT NOT NULL,
product_id TEXT NOT NULL,
rating REAL NOT NULL,
total_reviews INTEGER NOT NULL,
FOREIGN KEY(suggestion_id) REFERENCES suggestions(id) ON DELETE CASCADE
);
-- Create the Fakespot FTS table.
-- The `tokenize` param is hard to read. The effect is that dashes and apostrophes
-- are treated as part of a token rather than as separators.
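-- For example, 'mother-in-law' is indexed as one token rather than three.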
CREATE VIRTUAL TABLE IF NOT EXISTS fakespot_fts USING FTS5(
title,
prefix='4 5 6 7 8 9 10 11',
content='',
contentless_delete=1,
tokenize=\"porter unicode61 remove_diacritics 2 tokenchars '''-'\"
);
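-- Keep the FTS table in sync: index the title of the corresponding suggestion
-- whenever a fakespot row is inserted.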
CREATE TRIGGER fakespot_ai AFTER INSERT ON fakespot_custom_details BEGIN
INSERT INTO fakespot_fts(rowid, title)
SELECT id, title
FROM suggestions
WHERE id = new.suggestion_id;
END;
",
)?;
Ok(())
}
21 => {
// Drop and re-create the fakespot_custom_details table to add the `icon_id` column.
tx.execute_batch(
"
DROP TABLE fakespot_custom_details;
CREATE TABLE fakespot_custom_details(
suggestion_id INTEGER PRIMARY KEY,
fakespot_grade TEXT NOT NULL,
product_id TEXT NOT NULL,
rating REAL NOT NULL,
total_reviews INTEGER NOT NULL,
icon_id TEXT,
FOREIGN KEY(suggestion_id) REFERENCES suggestions(id) ON DELETE CASCADE
);
CREATE TRIGGER fakespot_ai AFTER INSERT ON fakespot_custom_details BEGIN
INSERT INTO fakespot_fts(rowid, title)
SELECT id, title
FROM suggestions
WHERE id = new.suggestion_id;
END;
",
)?;
Ok(())
}
22 => {
// Drop and re-create the fakespot_fts table to remove the `prefix` index param.
tx.execute_batch(
"
DROP TABLE fakespot_fts;
CREATE VIRTUAL TABLE fakespot_fts USING FTS5(
title,
content='',
contentless_delete=1,
tokenize=\"porter unicode61 remove_diacritics 2 tokenchars '''-'\"
);
",
)?;
Ok(())
}
23 => {
// Drop all suggestions, then recreate the fakespot_custom_details table to add the
// `keywords` and `product_type` fields.
clear_database(tx)?;
tx.execute_batch(
"
DROP TABLE fakespot_custom_details;
CREATE TABLE fakespot_custom_details(
suggestion_id INTEGER PRIMARY KEY,
fakespot_grade TEXT NOT NULL,
product_id TEXT NOT NULL,
keywords TEXT NOT NULL,
product_type TEXT NOT NULL,
rating REAL NOT NULL,
total_reviews INTEGER NOT NULL,
icon_id TEXT,
FOREIGN KEY(suggestion_id) REFERENCES suggestions(id) ON DELETE CASCADE
);
CREATE TRIGGER fakespot_ai AFTER INSERT ON fakespot_custom_details BEGIN
INSERT INTO fakespot_fts(rowid, title)
SELECT id, title
FROM suggestions
WHERE id = new.suggestion_id;
END;
",
)?;
Ok(())
}
24 => {
// Clear the database so that we re-ingest and populate the ingested_records table.
clear_database(tx)?;
tx.execute_batch(
"
CREATE TABLE rs_cache(
collection TEXT PRIMARY KEY,
data TEXT NOT NULL
) WITHOUT ROWID;
CREATE TABLE ingested_records(
id TEXT,
collection TEXT,
type TEXT NOT NULL,
last_modified INTEGER NOT NULL,
PRIMARY KEY (id, collection)
) WITHOUT ROWID;
",
)?;
Ok(())
}
25 => {
// Create the exposure suggestions table and index.
tx.execute_batch(
"
CREATE TABLE exposure_custom_details(
suggestion_id INTEGER PRIMARY KEY,
type TEXT NOT NULL,
FOREIGN KEY(suggestion_id) REFERENCES suggestions(id) ON DELETE CASCADE
);
CREATE INDEX exposure_custom_details_type ON exposure_custom_details(type);
",
)?;
Ok(())
}
26 => {
// Create tables related to city-based weather.
tx.execute_batch(
"
CREATE TABLE keywords_metrics(
record_id TEXT NOT NULL PRIMARY KEY,
provider INTEGER NOT NULL,
max_length INTEGER NOT NULL,
max_word_count INTEGER NOT NULL
) WITHOUT ROWID;
CREATE TABLE geonames(
id INTEGER PRIMARY KEY,
record_id TEXT NOT NULL,
name TEXT NOT NULL,
feature_class TEXT NOT NULL,
feature_code TEXT NOT NULL,
country_code TEXT NOT NULL,
admin1_code TEXT NOT NULL,
population INTEGER
);
CREATE INDEX geonames_feature_class ON geonames(feature_class);
CREATE INDEX geonames_feature_code ON geonames(feature_code);
CREATE TABLE geonames_alternates(
name TEXT NOT NULL,
geoname_id INTEGER NOT NULL,
PRIMARY KEY (name, geoname_id),
FOREIGN KEY(geoname_id) REFERENCES geonames(id) ON DELETE CASCADE
) WITHOUT ROWID;
CREATE TABLE geonames_metrics(
record_id TEXT NOT NULL PRIMARY KEY,
max_name_length INTEGER NOT NULL,
max_name_word_count INTEGER NOT NULL
) WITHOUT ROWID;
",
)?;
Ok(())
}
27 => {
// Add latitude and longitude to the geonames table. Clear the
// database so geonames are reingested.
clear_database(tx)?;
tx.execute_batch(
"
DROP INDEX geonames_feature_class;
DROP INDEX geonames_feature_code;
DROP TABLE geonames;
CREATE TABLE geonames(
id INTEGER PRIMARY KEY,
record_id TEXT NOT NULL,
name TEXT NOT NULL,
latitude REAL NOT NULL,
longitude REAL NOT NULL,
feature_class TEXT NOT NULL,
feature_code TEXT NOT NULL,
country_code TEXT NOT NULL,
admin1_code TEXT NOT NULL,
population INTEGER NOT NULL
);
CREATE INDEX geonames_feature_class ON geonames(feature_class);
CREATE INDEX geonames_feature_code ON geonames(feature_code);
",
)?;
Ok(())
}
28 => {
// Add `iso_language` column to `geonames_alternates`. Clear the
// database so geonames are reingested.
clear_database(tx)?;
tx.execute_batch(
"
DROP TABLE geonames_alternates;
CREATE TABLE geonames_alternates(
name TEXT NOT NULL,
geoname_id INTEGER NOT NULL,
-- The value of the `iso_language` field for the alternate. This will be
-- null for the alternate we artificially create for the `name` in the
-- corresponding geoname record.
iso_language TEXT,
PRIMARY KEY (name, geoname_id),
FOREIGN KEY(geoname_id) REFERENCES geonames(id) ON DELETE CASCADE
) WITHOUT ROWID;
",
)?;
Ok(())
}
29 => {
// This migration only clears the database because some tables
// that should have been cleared in previous migrations were
// not.
clear_database(tx)?;
Ok(())
}
30 => {
// Add the `geonames_alternates_geoname_id` index.
clear_database(tx)?;
tx.execute_batch(
"
CREATE INDEX geonames_alternates_geoname_id ON geonames_alternates(geoname_id);
",
)?;
Ok(())
}
31 => {
// Clear the database so that the new amp_fts index gets populated on re-ingest.
clear_database(tx)?;
tx.execute_batch(
"
CREATE VIRTUAL TABLE IF NOT EXISTS amp_fts USING FTS5(
full_keywords,
title,
content='',
contentless_delete=1,
tokenize=\"porter unicode61 remove_diacritics 2 tokenchars '''-'\"
);
",
)?;
Ok(())
}
32 => {
// Drop rs_cache since it's no longer needed.
tx.execute_batch("DROP TABLE rs_cache;")?;
Ok(())
}
33 => {
// This migration is due to the replacement of the
// `quicksuggest` collection with `quicksuggest-amp` and
// `quicksuggest-other`. Clear the DB so that records from the
// old collection don't stick around. See bug 1953945.
clear_database(tx)?;
Ok(())
}
34 => {
// Replace the exposure suggestions table and index with the
// dynamic suggestions table and index.
tx.execute_batch(
r#"
DROP INDEX exposure_custom_details_type;
DROP TABLE exposure_custom_details;
CREATE TABLE dynamic_custom_details(
suggestion_id INTEGER PRIMARY KEY,
suggestion_type TEXT NOT NULL,
json_data TEXT,
FOREIGN KEY(suggestion_id) REFERENCES suggestions(id) ON DELETE CASCADE
);
CREATE INDEX dynamic_custom_details_suggestion_type ON dynamic_custom_details(suggestion_type);
"#,
)?;
Ok(())
}
35 => {
// The commit that added this migration was reverted.
Ok(())
}
36 => {
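// Drop the yelp_location_signs table if it's still present.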
tx.execute_batch("DROP TABLE IF EXISTS yelp_location_signs;")?;
Ok(())
}
_ => Err(open_database::Error::IncompatibleVersion(version)),
}
}