diff --git a/src/partition/sph-duplicate2.cc b/src/partition/sph-duplicate2.cc
index e39aa25bc..5775716a2 100644
--- a/src/partition/sph-duplicate2.cc
+++ b/src/partition/sph-duplicate2.cc
@@ -636,7 +636,7 @@ size_t duplicateObjectRow(std::string &line, part::SphericalBox const &box, std:
     double decl(0.);
     int idx = 0;
 
-    for (std::string const token : tokens) {
+    for (std::string const &token : tokens) {
         if (coldefObject.idxDeepSourceId == idx) {
             deepSourceId = boost::lexical_cast<uint64_t>(token);
         } else if (coldefObject.idxRa == idx) {
@@ -764,7 +764,7 @@ size_t duplicateSourceRow(std::string &line, part::SphericalBox const &box, std:
     double cluster_coord_decl(0.);
     int idx = 0;
 
-    for (std::string const token : tokens) {
+    for (std::string const &token : tokens) {
         if (coldefSource.idxId == idx) {
             id = boost::lexical_cast<uint64_t>(token);
         } else if (coldefSource.idxCoordRa == idx) {
@@ -915,7 +915,7 @@ size_t duplicateForcedSourceRow(std::string &line, part::SphericalBox const &box
     uint64_t deepSourceId(0ULL);
     int idx = 0;
 
-    for (std::string const token : tokens) {
+    for (std::string const &token : tokens) {
         if (coldefForcedSource.idxDeepSourceId == idx) {
             deepSourceId = boost::lexical_cast<uint64_t>(token);
         }
diff --git a/src/replica/HttpIngestModule.cc b/src/replica/HttpIngestModule.cc
index 0f3726c4a..807150d90 100644
--- a/src/replica/HttpIngestModule.cc
+++ b/src/replica/HttpIngestModule.cc
@@ -176,7 +176,7 @@ json HttpIngestModule::_getDatabases() {
     debug(__func__, "isPublished=" + bool2str(isPublished));
 
     json databasesJson = json::array();
-    for (auto const databaseName : config->databases(family, allDatabases, isPublished)) {
+    for (string const& databaseName : config->databases(family, allDatabases, isPublished)) {
         auto const database = config->databaseInfo(databaseName);
         databasesJson.push_back({{"name", database.name},
                                  {"family", database.family},
@@ -414,12 +414,12 @@ json HttpIngestModule::_deleteDatabase() {
         conn->execute(g.dropDb(database.name, ifExists));
         auto const emptyChunkListTable = css::DbInterfaceMySql::getEmptyChunksTableName(database.name);
         conn->execute(g.dropTable(g.id("qservCssData", emptyChunkListTable), ifExists));
-        for (auto const tableName : directorTables) {
+        for (string const& tableName : directorTables) {
             string const query = g.dropTable(
                     g.id("qservMeta", directorIndexTableName(database.name, tableName)), ifExists);
             conn->execute(query);
         }
-        for (auto const tableName : database.tables()) {
+        for (string const& tableName : database.tables()) {
            try {
                string const query = g.dropTable(
                        g.id("qservMeta", rowCountersTable(database.name, tableName)), ifExists);
@@ -1077,7 +1077,7 @@ void HttpIngestModule::_removeMySQLPartitions(DatabaseInfo const& database, bool
     bool const ignoreNonPartitioned = true;
     string const noParentJobId;
     string error;
-    for (auto const tableName : database.tables()) {
+    for (string const& tableName : database.tables()) {
         auto const& table = database.findTable(tableName);
         // Skip tables that have been published.
         if (table.isPublished) continue;