Solidity and Ethereum: Handling events while using the factory pattern - mocha.js

I am in the beginning stages of learning Solidity and Hardhat. I found a tutorial that seems pretty good, and I was able to get the code from the example here working: https://dev.to/dabit3/building-scalable-full-stack-apps-on-ethereum-with-polygon-2cfb. Let's say I wanted to expand this example and add factory pattern functionality to it. If a method is called through a factory method, how do I ensure events are propagated out past the factory method? In this case, how would I handle the event(s) in the sample.js file?
NFTMarketFactory.sol:
// contracts/NFTMarketFactory.sol
// SPDX-License-Identifier: MIT OR Apache-2.0
pragma solidity ^0.8.3;
import "./NFTMarket.sol";
import "#openzeppelin/contracts/security/ReentrancyGuard.sol";
import "#openzeppelin/contracts/token/ERC721/ERC721.sol";
contract NFTMarketFactory is ReentrancyGuard {
address marketAddress; // address of the deployed NFTMarket contract (cannot be named `address`, a reserved type name)
address payable owner;
constructor() {
owner = payable(msg.sender);
}
.
.
.
function createMarketItem(
address nftContract,
uint256 tokenId,
uint256 price
) public payable nonReentrant {
NFTMarket market = NFTMarket(marketAddress);
market.createMarketItem{value: msg.value}(nftContract, tokenId, price, msg.sender);
}
}
NFTMarket.sol:
/* Places an item for sale on the marketplace */
function createMarketItem(
address nftContract,
uint256 tokenId,
uint256 price,
address sender
) public payable nonReentrant {
require(price > 0, "Price must be at least 1 wei");
require(msg.value == listingPrice, "Price must be equal to listing price");
_itemIds.increment();
uint256 itemId = _itemIds.current();
idToMarketItem[itemId] = MarketItem(
itemId,
nftContract,
tokenId,
payable(sender),
payable(address(0)),
price,
false
);
IERC721(nftContract).transferFrom(sender, address(this), tokenId);
emit MarketItemCreated(
itemId,
nftContract,
tokenId,
sender,
address(0),
price,
false
);
}
test/sample.js
.
.
.
/* create two tokens */
let token1 = await nft.createToken("https://www.mytokenlocation.com");
let token2 = await nft.createToken("https://www.mytokenlocation2.com");
/* put both tokens for sale */
await nftMarketFactory.createMerchandise(nftContractAddress, 1, auctionPrice, { value: listingPrice })
await nftMarketFactory.createMerchandise(nftContractAddress, 2, auctionPrice, { value: listingPrice })
const [_, buyerAddress] = await ethers.getSigners()
/* execute sale of token to another user */
await nftMarketFactory.connect(buyerAddress).createMarketSale(nftContractAddress, 1, { value: auctionPrice})
.
.
.
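For context, MarketItemCreated is emitted by NFTMarket itself even when the call goes through the factory, so in the test it can still be read from the transaction receipt. A minimal sketch of that (assuming ethers.js v5 under Hardhat, and assuming market is an NFTMarket contract instance attached to the address the factory forwards to):
const tx = await nftMarketFactory.createMarketItem(nftContractAddress, 1, auctionPrice, { value: listingPrice })
const receipt = await tx.wait()
const parsed = receipt.logs
  .map((log) => { try { return market.interface.parseLog(log) } catch (err) { return null } })
  .filter((event) => event && event.name === 'MarketItemCreated')
console.log('itemId:', parsed[0].args.itemId.toString())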

Related

Smart Contracts - Chainlink VRF / @openzeppelin/truffle-upgrades compatibility

Right now I am transforming a basic smart contract into an upgradeable smart contract using @openzeppelin/truffle-upgrades.
I followed all the required steps from the docs, but one issue remains:
Truffle-Upgrades requires me to replace the constructor with an initializer, which is fine for my own contract, but not for the smart contracts imported into it. Sample:
pragma solidity 0.6.6;
import "#chainlink/contracts/src/v0.6/VRFConsumerBase.sol";
contract Sample is VRFConsumerBase {
address private owner;
bytes32 internal keyHash;
uint256 internal fee;
constructor(address _owner)
VRFConsumerBase(
0xa555fC018435bef5A13C6c6870a9d4C11DEC329C, // VRF Coordinator
0x84b9B910527Ad5C03A9Ca831909E21e236EA7b06 // LINK Token
) public
{
keyHash = 0xcaf3c3727e033261d383b315559476f48034c13b18f8cafed4d871abe5049186;
fee = 0.1 * 10 ** 18; // 0.1 LINK (Varies by network)
owner = _owner;
}
...
And therefore truffle complains:
../@chainlink/contracts/src/v0.6/VRFConsumerBase.sol:182: Contract `VRFConsumerBase` has a constructor
Define an initializer instead
As it is a third-party package, I cannot replace it :)
Are there any architectural tricks/configurations?
I went through pretty much all of the Chainlink/Truffle docs but did not find a solution for this issue.
Thanks!
UPDATE 1:
First of all, I modified the VRFConsumerBase contract to the following (I also removed the comments to keep it short):
// SPDX-License-Identifier: MIT
pragma solidity 0.6.12;
import "#chainlink/contracts/src/v0.6/vendor/SafeMathChainlink.sol";
import "#chainlink/contracts/src/v0.6/interfaces/LinkTokenInterface.sol";
import "#chainlink/contracts/src/v0.6/VRFRequestIDBase.sol";
abstract contract VRFConsumerBaseUpgradable is VRFRequestIDBase {
using SafeMathChainlink for uint256;
function fulfillRandomness(bytes32 requestId, uint256 randomness)
internal virtual;
function requestRandomness(bytes32 _keyHash, uint256 _fee, uint256 _seed)
internal returns (bytes32 requestId)
{
LINK.transferAndCall(vrfCoordinator, _fee, abi.encode(_keyHash, _seed));
uint256 vRFSeed = makeVRFInputSeed(_keyHash, _seed, address(this), nonces[_keyHash]);
nonces[_keyHash] = nonces[_keyHash].add(1);
return makeRequestId(_keyHash, vRFSeed);
}
// removed immutable keyword <--
LinkTokenInterface internal LINK;
// removed immutable keyword <--
address private vrfCoordinator;
mapping(bytes32 /* keyHash */ => uint256 /* nonce */) private nonces;
// replaced constructor with initializer <--
function initialize(address _vrfCoordinator, address _link) public {
vrfCoordinator = _vrfCoordinator;
LINK = LinkTokenInterface(_link);
}
function rawFulfillRandomness(bytes32 requestId, uint256 randomness) external {
require(msg.sender == vrfCoordinator, "Only VRFCoordinator can fulfill");
fulfillRandomness(requestId, randomness);
}
}
What did I do:
I replaced the constructor with an initializer
I removed the immutable keyword from the state variables
Next, I used the Initializable contract from @openzeppelin/contracts-upgradeable in my file system to prevent the smart contract from executing the initializer more than once:
// SPDX-License-Identifier: MIT
pragma solidity 0.6.12;
abstract contract Initializable {
bool private _initialized;
bool private _initializing;
modifier initializer() {
require(_initializing || !_initialized, "Initializable: contract is already initialized");
bool isTopLevelCall = !_initializing;
if (isTopLevelCall) {
_initializing = true;
_initialized = true;
}
_;
if (isTopLevelCall) {
_initializing = false;
}
}
}
Important:
I did not import the Initializable contract via a Solidity import statement.
Instead, I copied the source code manually and set the compiler to 0.6.12, because @openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol targets 0.8.x.
Finally, I updated my contract to implement the Initializable and the new VRFConsumerBaseUpgradable contracts:
// SPDX-License-Identifier: MIT
pragma solidity 0.6.12;
import "./Initializable.sol";
import "./VRFConsumerBaseUpgradable.sol";
contract Sample is Initializable, VRFConsumerBaseUpgradable {
bytes32 internal keyHash;
uint256 internal fee;
address private owner;
function initialize(address _owner)
public
initializer
{
VRFConsumerBaseUpgradable.initialize(
0xa555fC018435bef5A13C6c6870a9d4C11DEC329C, // VRF Coordinator
0x84b9B910527Ad5C03A9Ca831909E21e236EA7b06 // LINK Token
);
keyHash = 0xcaf3c3727e033261d383b315559476f48034c13b18f8cafed4d871abe5049186;
fee = 0.1 * 10 ** 18;
owner = _owner;
}
function getRandomNumber(uint256 userProvidedSeed) public returns (bytes32 requestId) {
require(LINK.balanceOf(address(this)) >= fee, "Not enough LINK - fill contract with faucet");
return requestRandomness(keyHash, fee, userProvidedSeed);
}
function fulfillRandomness(bytes32 requestId, uint256 randomness) internal override {
// logic
}
...
}
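For anyone following along, the migration for this could look roughly like the following minimal sketch (assuming @openzeppelin/truffle-upgrades; the file name and the use of accounts[0] as the owner are just placeholders):
// migrations/2_deploy_sample.js
const { deployProxy } = require('@openzeppelin/truffle-upgrades');
const Sample = artifacts.require('Sample');
module.exports = async function (deployer, network, accounts) {
// deploys the proxy and runs initialize(accounts[0]) through it
await deployProxy(Sample, [accounts[0]], { deployer, initializer: 'initialize' });
};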
I tested an initial migration as well as an upgrade via Truffle, and both worked, so I think this is fine and I am leaving it here for future researchers.
What do you think? Should I create a merge request for VRFConsumerBaseUpgradable?

Transferring assets between accounts and pallet

I'm trying to create a pallet that users can deposit assets into and withdraw from.
I've written the following code, but I'm not sure it's the best way to go about things, since frame_system::RawOrigin::Root.into() is accessible by every runtime.
I'm still fairly new to Substrate and not sure this is exactly how it works; I would love some guidance on the best design choice.
Making use of assets pallet to deposit:
<Assets::Module<T>>::transfer(origin, asset_id, RawOrigin::Root.into(), amount);
To Withdraw:
<Assets::Module<T>>::transfer(RawOrigin::Root.into(), asset_id, origin, amount);
Edit
A similar idea written in Solidity:
contract DepositWithdrawSend {
using SafeMath for uint256;
mapping (address => mapping (address => uint256)) public depositInfo;
address public sendPallet;
constructor(address _sendPallet) public {
sendPallet = _sendPallet;
}
function deposit(address _token, uint256 _amount) public {
IERC20(_token).transferFrom(msg.sender, address(this), _amount);
depositInfo[_token][msg.sender] = depositInfo[_token][msg.sender].add(_amount);
}
function withdraw(address _token, uint256 _amount) public {
require(depositInfo[_token][msg.sender] >= _amount, "Over withdraw");
require(IERC20(_token).balanceOf(address(this)) >= _amount, "Not enough");
IERC20(_token).transfer(msg.sender, _amount);
depositInfo[_token][msg.sender] = depositInfo[_token][msg.sender].sub(_amount);
}
function send(address _token, uint256 _amount) public {
require(IERC20(_token).balanceOf(address(this)) >= _amount, "Not enough");
IERC20(_token).transfer(sendPallet, _amount);
}
}
We follow a pretty simple pattern to give pallets their own "account" for transferring balances to or anything else.
First you create a unique PalletId representing your pallet:
use frame_support::PalletId;
const MyPalletId: PalletId = PalletId(*b"replace_");
Then from here, you can generate an AccountId from this PalletId:
use sp_runtime::traits::AccountIdConversion;
/// These actually do computation. If you need to keep using them,
/// then make sure you cache the value and only call them once.
pub fn account_id() -> T::AccountId {
T::PalletId::get().into_account()
}
pub fn sub_account(seed: u16) -> T::AccountId {
// only use two byte prefix to support 16 byte account id (used by test)
// "modl" ++ "replace_" ++ "hi" is 14 bytes, and two bytes remaining for bounty index
T::PalletId::get().into_sub_account(("hi", seed))
}
This pattern is used in the Treasury Pallet and others.

Sorting does not work for the customized ProductListComponentService (CustomProductListComponentService)

By default, Spartacus lists 10 products per page on its product listing page, but I wanted it to be 12.
In my PLP .ts file I was normally using this structure:
protected productListComponentServiceDefault: ProductListComponentService,
Then I created "CustomProductListComponentService" by customizing the "ProductListComponentService" file and added:
protected defaultPageSize = 12;
That did what I wanted: pagination now works with 12 items per page.
But after this customization, the "SORTING" operation within the page no longer works.
Inside CustomProductListComponentService I have the following:
import { Injectable } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import {
ActivatedRouterStateSnapshot,
CurrencyService,
LanguageService,
ProductSearchPage,
ProductSearchService,
RouterState,
RoutingService,
} from '@spartacus/core';
import { combineLatest, Observable, using } from 'rxjs';
import {
debounceTime,
distinctUntilChanged,
filter,
map,
shareReplay,
tap,
} from 'rxjs/operators';
// @ts-ignore
import { ProductListRouteParams, SearchCriteria } from './product-list.model';
/**
* The `ProductListComponentService` is used to search products. The service is used
* on the Product Listing Page, for listing products and the facet navigation.
*
* The service exposes the product search results based on the category and search
* route parameters. The route parameters are used to query products by the help of
* the `ProductSearchService`.
*/
@Injectable({ providedIn: 'root' })
export class CustomProductListComponentService {
// TODO: make it configurable
protected defaultPageSize = 12;
protected readonly RELEVANCE_ALLCATEGORIES = ':relevance:allCategories:';
constructor(
protected productSearchService: ProductSearchService,
protected routing: RoutingService,
protected activatedRoute: ActivatedRoute,
protected currencyService: CurrencyService,
protected languageService: LanguageService,
protected router: Router
) {}
/**
* Emits the search results for the current search query.
*
* The `searchResults$` is _not_ concerned with querying, it only observes the
* `productSearchService.getResults()`
*/
protected searchResults$: Observable<
ProductSearchPage
> = this.productSearchService
.getResults()
.pipe(filter((searchResult) => Object.keys(searchResult).length > 0));
/**
* Observes the route and performs a search on each route change.
*
* Context changes, such as language and currencies are also taken
* into account, so that the search is performed again.
*/
protected searchByRouting$: Observable<
ActivatedRouterStateSnapshot
> = combineLatest([
this.routing.getRouterState().pipe(
distinctUntilChanged((x, y) => {
// router emits new value also when the anticipated `nextState` changes
// but we want to perform search only when current url changes
return x.state.url === y.state.url;
})
),
...this.siteContext,
]).pipe(
debounceTime(0),
map(([routerState, ..._context]) => (routerState as RouterState).state),
tap((state: ActivatedRouterStateSnapshot) => {
const criteria = this.getCriteriaFromRoute(
state.params,
state.queryParams
);
this.search(criteria);
})
);
/**
* This stream is used for the Product Listing and Product Facets.
*
* It not only emits search results, but also performs a search on every change
* of the route (i.e. route params or query params).
*
* When a user leaves the PLP route, the PLP component unsubscribes from this stream
* so no longer the search is performed on route change.
*/
readonly model$: Observable<ProductSearchPage> = using(
() => this.searchByRouting$.subscribe(),
() => this.searchResults$
).pipe(shareReplay({ bufferSize: 1, refCount: true }));
/**
* Expose the `SearchCriteria`. The search criteria are driven by the route parameters.
*
* This search route configuration is not yet configurable
* (see https://github.com/SAP/spartacus/issues/7191).
*/
protected getCriteriaFromRoute(
routeParams: ProductListRouteParams,
queryParams: SearchCriteria
): SearchCriteria {
return {
query: queryParams.query || this.getQueryFromRouteParams(routeParams),
pageSize: queryParams.pageSize || this.defaultPageSize,
currentPage: queryParams.currentPage,
sortCode: queryParams.sortCode,
};
}
/**
* Resolves the search query from the given `ProductListRouteParams`.
*/
protected getQueryFromRouteParams({
query,
categoryCode,
brandCode,
}: ProductListRouteParams) {
if (query) {
return query;
}
if (categoryCode) {
return this.RELEVANCE_ALLCATEGORIES + categoryCode;
}
// TODO: drop support for brands as they should be treated
// similarly as any category.
if (brandCode) {
return this.RELEVANCE_ALLCATEGORIES + brandCode;
}
}
/**
* Performs a search based on the given search criteria.
*
* The search is delegated to the `ProductSearchService`.
*/
protected search(criteria: SearchCriteria): void {
const currentPage = criteria.currentPage;
const pageSize = criteria.pageSize;
const sort = criteria.sortCode;
this.productSearchService.search(
criteria.query,
// TODO: consider dropping this complex passing of cleaned object
Object.assign(
{},
currentPage && { currentPage },
pageSize && { pageSize },
sort && { sort }
)
);
}
/**
* Get items from a given page without using navigation
*/
getPageItems(pageNumber: number): void {
this.routing
.getRouterState()
.subscribe((route) => {
const routeCriteria = this.getCriteriaFromRoute(
route.state.params,
route.state.queryParams
);
const criteria = {
...routeCriteria,
currentPage: pageNumber,
};
this.search(criteria);
})
.unsubscribe();
}
/**
* Sort the search results by the given sort code.
*/
sort(sortCode: string): void {
this.route({ sortCode });
}
/**
* Routes to the next product listing page, using the given `queryParams`. The
* `queryParams` support sorting, pagination and querying.
*
* The `queryParams` are delegated to the Angular router `NavigationExtras`.
*/
protected route(queryParams: SearchCriteria): void {
this.router.navigate([], {
queryParams,
queryParamsHandling: 'merge',
relativeTo: this.activatedRoute,
});
}
/**
* The site context is used to update the search query in case of a
* changing context. The context will typically influence the search data.
*
* We keep this private for now, as we're likely refactoring this in the next
* major version.
*/
private get siteContext(): Observable<string>[] {
// TODO: we should refactor this so that custom context will be taken
// into account automatically. Ideally, we drop the specific context
// from the constructor, and query a ContextService for all contexts.
return [this.languageService.getActive(), this.currencyService.getActive()];
}
}
I call the Sort function in plp.ts as follows.
constructor(
protected productListComponentService: CustomProductListComponentService,
) {}
sortList(sortCode: string): void {
this.productListComponentService.sort(sortCode);
}
The sort process doesn't work. Can you help me? Thank you very much in advance.
You also need to replace "ProductListComponentService" with "CustomProductListComponentService" in "ProductFacetService".
The better way to do this is:
In providers of "ProductListModule", add:
{
provide: ProductListComponentService,
useClass: CustomProductListComponentService,
},
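For example, the provider override could live in a module along these lines (a sketch; the module name and the custom service's import path are assumptions, and exact Spartacus import paths may vary by version):
import { NgModule } from '@angular/core';
import { ProductListModule, ProductListComponentService } from '@spartacus/storefront';
import { CustomProductListComponentService } from './custom-product-list-component.service';
@NgModule({
imports: [ProductListModule],
providers: [
{
provide: ProductListComponentService,
useClass: CustomProductListComponentService,
},
],
})
export class CustomProductListModule {}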

Creating an Observable that gets its value from a subscription's calculation

Hi, I'm trying to create an Observable that will have values emitted to it from another subscription, in this case an ngrx Store Reducer.
export class IsolatedAgentService {
missionList$: Observable<any>; // I need this observables subscription to emit to calculatedValue$
calculatedValue$:Observable<any>; // I need this observable to get its values from the subscription of missionList$ subscription
missionList:any;
constructor(
private _store:Store<any>
){
this.missionList$ = this._store.select(root_reducers.getMissionList).pipe(skip(1));
this.missionList$.subscribe((val:any)=> {
let mostIsolatedCountry:any; //will hold value of calculation
this.missionList = val;
mostIsolatedCountry = this.getMostIsolatedCountry(this.missionList);
// I want to emit mostIsolatedCountry to another subscription
});
}
What I'm trying to do:
export class IsolatedAgentService {
missionList$: Observable<any>;
calculatedValue$:Observable<any> = Observable.create((observer)=>{
// moved this line here from the previous missionList$ subscription
let calculated:any = this.getMostIsolatedCountry(this.missionList);
observer.next(calculated)
});
missionList:any;
calculatedValue:any;
constructor(
private _store:Store<any>
){
this.missionList$ = this._store.select(root_reducers.getMissionList).pipe(skip(1));
this.missionList$.subscribe((val:any)=> {
let mostIsolatedCountry:any;
this.missionList = val;
this.calculatedValue$.subscribe((value)=>{
this.calculatedValue = value;
});
});
}
Currently, I'm basically setting a class property in one subscription, then inside that same subscription, after setting the class property, I'm calling the second subscription, which calculates the value from that class property.
This does not feel right, and I'm sure it's not the way to do it, but I'm lacking in my rxjs/observable knowledge at this point.
Note! I'm not interested in emitting the calculated value through a Store Action; I want an Observable that is specific to the class instance.
Here's the answer to your question:
export class IsolatedAgentService {
missionList$: Observable<Mission[]>;
calculatedValue$:Observable<any>;
constructor(
private _store:Store<any>
){
this.missionList$ = this._store.select(root_reducers.getMissionList).pipe(skip(1));
this.calculatedValue$ = this.missionList$.pipe(
map( missions => this.getMostIsolatedCountry(missions) )
);
}
}
or even
this.calculatedValue$ = this.missionList$.pipe(
map( this.getMostIsolatedCountry )
);
See more about NGRX facades: https://medium.com/@thomasburleson_11450/ngrx-facades-better-state-management-82a04b9a1e39
Why aren't you exposing and using observables? Why even subscribe in a service?
What you should have instead:
export class IsolatedAgentService {
missionList$: Observable<Mission[]>;
calculatedValue$:Observable<any>;
constructor(
private _store:Store<any>
){
this.missionList$ = this._store.select(root_reducers.getMissionList).pipe(skip(1));
this.calculatedValue$ = this._store.select(root_reducers.getMissionCalculatedValue).pipe(skip(1));
}
}
And a selector that does the calculation you need:
export const getMissionCalculatedValue= createSelector(
getMissionList,
(missionList) => {
// do the calculations here
return calculationResult;
}
);
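In either approach, a component can then consume these streams with the async pipe instead of subscribing manually. A minimal sketch (the component name, template, and the service's import path are just illustrative):
import { Component } from '@angular/core';
import { IsolatedAgentService } from './isolated-agent.service';
@Component({
selector: 'app-isolated-agent',
template: `{{ isolatedAgentService.calculatedValue$ | async | json }}`,
})
export class IsolatedAgentComponent {
constructor(public isolatedAgentService: IsolatedAgentService) {}
}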

Firestore pagination - Is there any query compatible with firebase's limitToLast?

Is there a way to implement backward pagination with Firestore?
I am struggling to implement pagination with Firestore, and the available queries for it are limited. Forward pagination can be done with the startAt and limit methods; that is OK. But backward pagination can't be done as easily, because we only have the endBefore and endAt methods, and how can we get the last n elements from a given document? I know the Realtime Database has the limitToLast method. Is there any query like this for Firestore? (Also, I need to implement multiple sorting, so getting the last documents with "ASC" or "DESC" sorting will not work.)
Help much appreciated.
Thanks!
The equivalent of the limitToLast(...) operation from the Firebase Realtime Database in Cloud Firestore is to order the data descending (which is possible in Firestore) and then just limit(...). If you're having problems implementing this, update your question to show what you've done.
I agree that this is a sub-optimal API for back-pagination, since you're receiving the items in reverse order.
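For illustration, a sketch of that approach for fetching the previous page (v8-style API; the collection, field, pageSize, and firstVisibleDoc names are just placeholders):
// order by the field in the opposite direction, page from the first visible doc,
// then reverse the results client-side so they display in the original order
async function fetchPreviousPage() {
const snap = await db.collection('items')
.orderBy('createdAt', 'desc')
.startAfter(firstVisibleDoc) // the first document currently shown
.limit(pageSize)
.get();
return snap.docs.reverse(); // back to the original display order
}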
Simpler answer: Firestore now has .limitToLast(), which works exactly as you would expect. It's used in my own (I guess I need to publish it soon) Firestore wrapper:
//////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
// *** Paginate API ***
export const PAGINATE_INIT = 0;
export const PAGINATE_PENDING = -1;
export const PAGINATE_UPDATED = 1;
export const PAGINATE_DEFAULT = 10;
export const PAGINATE_CHOICES = [10, 25, 50, 100, 250, 500];
/**
* @classdesc
* An object to allow for paginating a table read from Firestore. REQUIRES a sorting choice
* @property {Query} Query that forms the basis for the table read
* @property {number} limit page size
* @property {QuerySnapshot} snapshot last successful snapshot/page fetched
* @property {enum} status status of pagination object
* @method PageForward pages the fetch forward
* @method PageBack pages the fetch backward
*/
export class PaginateFetch {
Query = null;
limit = PAGINATE_DEFAULT;
snapshot = null;
status = null; // -1 pending; 0 uninitialized; 1 updated;
/**
* ----------------------------------------------------------------------
* @constructs PaginateFetch constructs an object to paginate through large
* Firestore Tables
* @param {string} table a properly formatted string representing the requested collection
* - always an ODD number of elements
* @param {array} filterArray an (optional) 3xn array of filter (i.e. "where") conditions
* @param {array} sortArray a 2xn array of sort (i.e. "orderBy") conditions
* @param {ref} ref (optional) allows "table" parameter to reference a sub-collection
* of an existing document reference (I use a LOT of structured collections)
*
* The array is assumed to be sorted in the correct order -
* i.e. filterArray[0] is added first; filterArray[length-1] last
* returns data as an array of objects (not dissimilar to Redux State objects)
* with both the documentID and documentReference added as fields.
* @param {number} limit (optional)
* @returns {PaginateFetchObject}
**********************************************************************/
constructor(
table,
filterArray = null,
sortArray = null,
ref = null,
limit = PAGINATE_DEFAULT
) {
const db = ref ? ref : fdb;
this.limit = limit;
this.Query = sortQuery(
filterQuery(db.collection(table), filterArray),
sortArray
);
this.status = PAGINATE_INIT;
}
/**
* @method PageForward
* @returns Promise of a QuerySnapshot
*/
PageForward = () => {
const runQuery = this.snapshot
? this.Query.startAfter(_.last(this.snapshot.docs))
: this.Query;
this.status = PAGINATE_PENDING;
return runQuery
.limit(this.limit)
.get()
.then((QuerySnapshot) => {
this.status = PAGINATE_UPDATED;
//*IF* documents (i.e. haven't gone beyond the end)
if (!QuerySnapshot.empty) {
//then update document set, and execute callback
//return Promise.resolve(QuerySnapshot);
this.snapshot = QuerySnapshot;
}
return this.snapshot.docs.map((doc) => {
return {
...doc.data(),
Id: doc.id,
ref: doc.ref
};
});
});
};
PageBack = () => {
const runQuery = this.snapshot
? this.Query.endBefore(this.snapshot.docs[0])
: this.Query;
this.status = PAGINATE_PENDING;
return runQuery
.limitToLast(this.limit)
.get()
.then((QuerySnapshot) => {
this.status = PAGINATE_UPDATED;
//*IF* documents (i.e. haven't gone back before the start)
if (!QuerySnapshot.empty) {
//then update document set, and execute callback
this.snapshot = QuerySnapshot;
}
return this.snapshot.docs.map((doc) => {
return {
...doc.data(),
Id: doc.id,
ref: doc.ref
};
});
});
};
}
/**
* ----------------------------------------------------------------------
* @function filterQuery
* builds and returns a query built from an array of filter (i.e. "where")
* conditions
* @param {Query} query collectionReference or Query to build the filter upon
* @param {array} filterArray an (optional) 3xn array of filter (i.e. "where") conditions
* @returns Firestore Query object
*/
export const filterQuery = (query, filterArray = null) => {
return filterArray
? filterArray.reduce((accQuery, filter) => {
return accQuery.where(filter.fieldRef, filter.opStr, filter.value);
}, query)
: query;
};
/**
* ----------------------------------------------------------------------
* @function sortQuery
* builds and returns a query built from an array of sort (i.e. "orderBy")
* conditions
* @param {Query} query collectionReference or Query to build the sort upon
* @param {array} sortArray an (optional) 2xn array of sort (i.e. "orderBy") conditions
* @returns Firestore Query object
*/
export const sortQuery = (query, sortArray = null) => {
return sortArray
? sortArray.reduce((accQuery, sortEntry) => {
return accQuery.orderBy(sortEntry.fieldRef, sortEntry.dirStr || "asc");
//note "||" - if dirStr is not present(i.e. falsy) default to "asc"
}, query)
: query;
};
I also have the equivalent for CollectionGroup queries, and listeners for each as well.
I was running into this same issue, and not understanding why using limit with endAt wasn't returning the results I desired. I was attempting to implement a list in which you could paginate in both directions, first forward and then backward back to the start of the list.
To remedy the situation, I decided to just cache the startAfter DocumentSnapshot for each page so that one can move in both directions; this way I never have to use endAt. The only time this becomes an issue is if the collection of documents shifts or changes while the user is on a page other than the first, but by returning to the first page it will reset to the beginning of the collection.
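As an illustration of that cursor-caching idea, a minimal sketch (v8-style API; the collection, field, and pageSize names are placeholders):
const cursors = []; // cursors[i] holds the last DocumentSnapshot of page i
async function fetchPage(pageIndex) {
let q = db.collection('items').orderBy('createdAt').limit(pageSize);
if (pageIndex > 0) {
q = q.startAfter(cursors[pageIndex - 1]); // resume after the previous page's last doc
}
const snap = await q.get();
if (!snap.empty) {
cursors[pageIndex] = snap.docs[snap.docs.length - 1];
}
return snap.docs.map((doc) => ({ id: doc.id, ...doc.data() }));
}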
Yes. Building upon Frank's answer...
Have something like this in your query...
if (this.next) {
// if next, orderBy field descending, start after last field
q = q.orderBy('field', 'desc').startAfter(this.marker);
} else if (this.prev) {
// if prev, orderBy field ascending, start after first field
q = q.orderBy('field', 'asc').startAfter(this.marker);
} else {
// otherwise just display first page results normally
q = q.orderBy('field', 'desc');
}
q = q.limit(this.pageSize);
and then reverse it when you get the query...
this.testsCollection
.valueChanges({ idField: 'id' })
.pipe(
tap(results => {
if (this.prev) {
// if previous, need to reverse the results...
results.reverse();
}
})
)
I just want to share my code for Firestore pagination.
I am using react hooks w/ NextJS.
You will need the "useFirestoreQuery" hook, which can be found here:
https://usehooks.com/useFirestoreQuery/
So here is my set up.
/* Context User */
const {user} = useUser()
/* States */
const [query, setQuery] = useState(null)
const [ref, setRef] = useState(null)
const [reverse, setReverse] = useState(false)
const [limit, setLimit] = useState(2)
const [lastID, setLastID] = useState(null)
const [firstID, setFirstID] = useState(null)
const [page, setPage] = useState(1)
/* Query Hook */
const fireCollection = useFirestoreQuery(query)
/* Set Ref, **When firebase initialized** */
useEffect(() => {
user?.uid &&
setRef(
firebase
.firestore()
.collection('products')
.where('type', '==', 'vaporizers')
)
}, [user])
/* Initial Query, **When ref set** */
useEffect(() => {
ref && setQuery(ref.orderBy('id', 'asc').limit(limit))
}, [ref])
/* Next Page */
const nextPage = useCallback(() => {
setPage((p) => parseInt(p) + 1)
setReverse(false)
setQuery(ref.orderBy('id', 'asc').startAfter(lastID).limit(limit))
}, [lastID, limit])
/* Prev Page */
const prevPage = useCallback(() => {
setPage((p) => parseInt(p) - 1)
setReverse(true)
setQuery(ref.orderBy('id', 'desc').startAfter(firstID).limit(limit))
}, [firstID, limit])
/* Product List */
const ProductList = ({fireCollection}) => {
const [products, setProducts] = useState([])
useEffect(() => {
let tempProducts = []
let tempIDs = []
const {data} = fireCollection
for (const key in data) {
const product = data[key]
tempIDs.push(product.id)
tempProducts.push(<ProductRow {...{product}} key={key} />)
}
if (reverse) {
tempProducts.reverse()
tempIDs.reverse()
}
setFirstID(tempIDs[0])
setLastID(tempIDs.pop())
setProducts(tempProducts)
}, [fireCollection])
return products
}
I moved the 'ProductList' outside of the component with a context provider, but this is the gist of it.
Note:
If you are looking for the total number of products, I suggest you keep track of the totals with these Cloud Functions. You will need to store your totals in a separate collection; I call mine 'shortcuts'.
exports.incrementProducts = functions.firestore
.document('products/{id}')
.onCreate(async (snap, context) => {
const createdProduct = snap.data()
/* Increment a shortcut collection that holds the totals to your products */
})
exports.decrementProducts = functions.firestore
.document('products/{id}')
.onDelete((snap, context) => {
const deletedProduct = snap.data()
/* Decrement a shortcut collection that holds the totals to your products */
})
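For what it's worth, the counter update inside those handlers could use FieldValue.increment, along these lines (a sketch; the 'shortcuts/products' document path is just an example):
const admin = require('firebase-admin');
// helper used inside the onCreate/onDelete handlers above
const adjustProductTotal = (delta) =>
admin.firestore().doc('shortcuts/products')
.set({ total: admin.firestore.FieldValue.increment(delta) }, { merge: true });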
Don't forget:
Make sure you set up your indexes for all of this to work.
