Commit eaae20dc authored by Jacob Cain
paging fix, remove default &limit=10 if present

parent d27671c6
@@ -48,7 +48,6 @@ export default function Sidebar(props) {
   // Callback function to update selected title
   const updateAvailableQueriables = (queriables) => {
-    console.log(queriables);
     setAvailableQueriables(queriables);
   };
...
@@ -173,8 +173,12 @@ export default L.Map.AstroMap = L.Map.extend({
     for(const feature of myFeatures) {
       // Check if feature or feature.geometry is null or undefined
-      if(!feature || !feature.geometry){
-        console.warn("Invalid feature or missing geometry: ", feature);
+      if(!feature){
+        console.log("Invalid/Null Feature", feature);
+        continue;
+      }
+      else if(!feature.geometry){
+        console.log("Feature with missing geometry", feature)
         continue;
       }
...
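For reference, a small standalone sketch (not part of the commit) of how the split guard behaves; the myFeatures sample below is made up:

// Hypothetical input illustrating the two guard branches added in AstroMap.
const myFeatures = [
  null,                                                                   // hits the !feature branch
  { type: "Feature" },                                                    // hits the !feature.geometry branch
  { type: "Feature", geometry: { type: "Point", coordinates: [0, 0] } },  // passes both guards
];

for (const feature of myFeatures) {
  if (!feature) {
    console.log("Invalid/Null Feature", feature);
    continue;
  } else if (!feature.geometry) {
    console.log("Feature with missing geometry", feature);
    continue;
  }
  // only well-formed features reach the normal handling
  console.log("Processing geometry of type", feature.geometry.type);
}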
@@ -17,8 +17,9 @@ export async function FetchObjects(objInfo) {
   // For each url given
   for(const key in objInfo) {
     // Fetch JSON from url and read into object
+    // The stylesheet ones \/ get 404s so I'm discarding them for now
+    if (!key.includes(": stylesheet")){
       fetchPromise[key] = fetch(
         objInfo[key]
       ).then((res) => {
@@ -31,6 +32,7 @@ export async function FetchObjects(objInfo) {
         console.log(err);
       });
+    }
   }
   // Wait for each query to complete
   for(const key in objInfo){
@@ -50,86 +52,35 @@ export async function FetchObjects(objInfo) {
  * @param {string} queryString - The query to narrow the results returned from the collection.
  */
 export async function FetchFootprints(collection, page, step){
-  let collectionUrl;
-  let offsetMulitiplier;
+  const stacDefaultLimit = 10;
+  const pyDefaultLimit = 25;
+  let baseURL = collection.url;
   let pageInfo = "";
-  if(collection.url.slice(-1) !== "?") {
-    pageInfo += "&"
-  }
-  pageInfo += "page=" + page;
-  if (step != 10){
-    pageInfo += "&limit=" + step;
-  }
-  // check for pyGeo API
-  if (!collection.url.includes('stac'))
-  {
-    // set offset for 5 & 10 steps
-    offsetMulitiplier = (page * 10 - step);
-    pageInfo = "&offset=" + offsetMulitiplier;
-    // checks for 5 change in step
-    if (step <= 10)
-    {
-      // splice limit and change to new limit
-      collectionUrl = collection.url.split('&limit=')[0];
-      collection.url = collectionUrl;
-      // update page pageInfo
-      pageInfo = "&offset=" + offsetMulitiplier + "&limit=" + step;
-    }
-    // checks for 50 & 100 step
-    else if (step == 50 || step == 100)
-    {
-      // splice limit and change to new limit
-      collectionUrl = collection.url.split('&limit=')[0];
-      collection.url = collectionUrl;
-      // check for first page
-      if (page == 1)
-      {
-        // set multiplier to 0
-        offsetMulitiplier = 0;
-      }
-      // check for second page
-      else if (page == 2)
-      {
-        // set multiplier to step
-        offsetMulitiplier = step;
-      }
-      else
-      {
-        // check for 50 and set pages according
-        if (step == 50)
-        {
-          offsetMulitiplier = page * step - 50;
-        }
-        // check for 100 and set pages according
-        else
-        {
-          offsetMulitiplier = page * step - 100;
-        }
-      }
-      // update page pageInfo
-      pageInfo = "&offset=" + offsetMulitiplier + "&limit=" + step;
-    }
-  }
-  // reset offset
-  offsetMulitiplier = 0;
-  let jsonRes = await FetchObjects(collection.url + pageInfo);
+  // get rid of default limit present in some pygeoapi urls
+  if(baseURL.slice(-9) == "&limit=10") {
+    baseURL = baseURL.slice(0, -9);
+  }
+  if(collection.url.slice(-1) !== "?") {
+    pageInfo += "&"
+  }
+  if (collection.url.includes('stac'))
+  {
+    pageInfo += "page=" + page;
+    if (step !== stacDefaultLimit) {
+      pageInfo += "&limit=" + step;
+    }
+  }
+  else {
+    pageInfo += "offset=" + step * (page - 1);
+    if (step !== pyDefaultLimit) {
+      pageInfo += "&limit=" + step;
+    }
+  }
+  let jsonRes = await FetchObjects(baseURL + pageInfo);
   return jsonRes.features;
 }
...
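For reference, a minimal, slightly simplified standalone sketch (not part of the commit) of the reworked paging logic; buildFootprintUrl and the example URLs are hypothetical, and the real FetchFootprints passes the resulting URL to FetchObjects and returns jsonRes.features:

// Sketch only: mirrors the new paging arithmetic without any network calls.
const stacDefaultLimit = 10;
const pyDefaultLimit = 25;

function buildFootprintUrl(url, page, step) {
  let baseURL = url;
  // drop the default limit that some pygeoapi collection urls carry
  if (baseURL.slice(-9) === "&limit=10") {
    baseURL = baseURL.slice(0, -9);
  }
  let pageInfo = "";
  if (baseURL.slice(-1) !== "?") {
    pageInfo += "&";
  }
  if (baseURL.includes("stac")) {
    // STAC endpoints page with page/limit
    pageInfo += "page=" + page;
    if (step !== stacDefaultLimit) {
      pageInfo += "&limit=" + step;
    }
  } else {
    // pygeoapi endpoints page with offset/limit, where offset = step * (page - 1)
    pageInfo += "offset=" + step * (page - 1);
    if (step !== pyDefaultLimit) {
      pageInfo += "&limit=" + step;
    }
  }
  return baseURL + pageInfo;
}

// Page 3 at 50 results per page:
console.log(buildFootprintUrl("https://example.test/stac/collections/mars/items?", 3, 50));
// -> https://example.test/stac/collections/mars/items?page=3&limit=50
console.log(buildFootprintUrl("https://example.test/pygeoapi/collections/mars/items?&limit=10", 3, 50));
// -> https://example.test/pygeoapi/collections/mars/items?offset=100&limit=50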