fix: paginate bucket listing in Admin UI to show all buckets (#8585)

* fix: paginate bucket listing in Admin UI to show all buckets

The Admin UI's GetS3Buckets() had a hardcoded Limit of 1000 in the
ListEntries request, causing the Total Buckets count to cap at 1000
even when more buckets exist. This adds pagination to iterate through
all buckets by continuing from the last entry name when a full page
is returned.

Fixes seaweedfs/seaweedfs#8564

* feat: add server-side pagination and sorting to S3 buckets page

Add pagination controls, page size selector, and sortable column
headers to the Admin UI's Object Store buckets page, following the
same pattern used by the Cluster Volumes page. This ensures the UI
remains responsive with thousands of buckets.

- Add CurrentPage, TotalPages, PageSize, SortBy, SortOrder to S3BucketsData
- Accept page/pageSize/sortBy/sortOrder query params in ShowS3Buckets handler
- Sort buckets by name, owner, created, objects, logical/physical size
- Paginate results server-side (default 100 per page)
- Add pagination nav, page size dropdown, and sort indicators to template

* Update s3_buckets_templ.go

* Update object_store_users_templ.go

* fix: use errors.Is(err, io.EOF) instead of string comparison

Replace brittle err.Error() == "EOF" string comparison with idiomatic
errors.Is(err, io.EOF) for checking stream end in bucket listing.

* fix: address PR review findings for bucket pagination

- Clamp page to totalPages when page exceeds total, preventing empty
  results with misleading pagination state
- Fix sort comparator to use explicit ascending/descending comparisons
  with a name tie-breaker, satisfying strict weak ordering for sort.Slice
- Capture SnapshotTsNs from first ListEntries response and pass it to
  subsequent requests for consistent pagination across pages
- Replace non-focusable <th onclick> sort headers with <a> tags and
  reuse getSortIcon, matching the cluster_volumes accessibility pattern
- Change exportBucketList() to fetch all buckets from /api/s3/buckets
  instead of scraping DOM rows (which now only contain the current page)
This commit is contained in:
Chris Lu
2026-03-09 18:55:47 -07:00
committed by GitHub
parent 02d3e3195c
commit b991acf634
5 changed files with 771 additions and 149 deletions

View File

@@ -2,7 +2,9 @@ package dash
import (
"context"
"errors"
"fmt"
"io"
"net/http"
"sort"
"strings"
@@ -371,8 +373,21 @@ func (s *AdminServer) GetCredentialManager() *credential.CredentialManager {
// InvalidateCache method moved to cluster_topology.go
// GetS3BucketsData retrieves all Object Store buckets and aggregates total storage metrics
func (s *AdminServer) GetS3BucketsData() (S3BucketsData, error) {
// GetS3BucketsData retrieves Object Store buckets with pagination and sorting
func (s *AdminServer) GetS3BucketsData(page, pageSize int, sortBy, sortOrder string) (S3BucketsData, error) {
if page < 1 {
page = 1
}
if pageSize < 1 || pageSize > 1000 {
pageSize = 100
}
if sortBy == "" {
sortBy = "name"
}
if sortOrder == "" {
sortOrder = "asc"
}
buckets, err := s.GetS3Buckets()
if err != nil {
return S3BucketsData{}, err
@@ -383,14 +398,97 @@ func (s *AdminServer) GetS3BucketsData() (S3BucketsData, error) {
totalSize += bucket.PhysicalSize
}
totalBuckets := len(buckets)
// Sort buckets
s.sortBuckets(buckets, sortBy, sortOrder)
// Calculate pagination
totalPages := (totalBuckets + pageSize - 1) / pageSize
if totalPages == 0 {
totalPages = 1
}
if page > totalPages {
page = totalPages
}
startIndex := (page - 1) * pageSize
endIndex := startIndex + pageSize
if startIndex >= totalBuckets {
buckets = []S3Bucket{}
} else {
if endIndex > totalBuckets {
endIndex = totalBuckets
}
buckets = buckets[startIndex:endIndex]
}
return S3BucketsData{
Buckets: buckets,
TotalBuckets: len(buckets),
TotalBuckets: totalBuckets,
TotalSize: totalSize,
LastUpdated: time.Now(),
CurrentPage: page,
TotalPages: totalPages,
PageSize: pageSize,
SortBy: sortBy,
SortOrder: sortOrder,
}, nil
}
// sortBuckets sorts the bucket slice in place by the given field and order.
//
// The comparator is built from a three-way key comparison (-1/0/+1) on the
// requested field, with the bucket name as a tie-breaker (and as the primary
// key when sortBy is "name" or unrecognized). The asc/desc flip is applied
// once to the combined result, which preserves strict weak ordering for
// sort.Slice.
func (s *AdminServer) sortBuckets(buckets []S3Bucket, sortBy, sortOrder string) {
	ascending := sortOrder != "desc"

	// compareKey returns -1, 0, or +1 for the primary sort field of a vs b.
	// Unknown sortBy values (including "name") yield 0 so the name
	// tie-breaker below becomes the effective sort key.
	compareKey := func(a, b S3Bucket) int {
		switch sortBy {
		case "owner":
			return strings.Compare(a.Owner, b.Owner)
		case "created":
			switch {
			case a.CreatedAt.Before(b.CreatedAt):
				return -1
			case a.CreatedAt.After(b.CreatedAt):
				return 1
			}
			return 0
		case "objects":
			switch {
			case a.ObjectCount < b.ObjectCount:
				return -1
			case a.ObjectCount > b.ObjectCount:
				return 1
			}
			return 0
		case "logical_size":
			switch {
			case a.LogicalSize < b.LogicalSize:
				return -1
			case a.LogicalSize > b.LogicalSize:
				return 1
			}
			return 0
		case "physical_size":
			switch {
			case a.PhysicalSize < b.PhysicalSize:
				return -1
			case a.PhysicalSize > b.PhysicalSize:
				return 1
			}
			return 0
		}
		return 0
	}

	sort.Slice(buckets, func(i, j int) bool {
		c := compareKey(buckets[i], buckets[j])
		if c == 0 {
			// Tie-breaker: bucket name (also the default/primary key).
			c = strings.Compare(buckets[i].Name, buckets[j].Name)
		}
		if ascending {
			return c < 0
		}
		return c > 0
	})
}
// GetS3Buckets retrieves all Object Store buckets from the filer and collects size/object data from collections
func (s *AdminServer) GetS3Buckets() ([]S3Bucket, error) {
var buckets []S3Bucket
@@ -406,28 +504,48 @@ func (s *AdminServer) GetS3Buckets() ([]S3Bucket, error) {
// Now list buckets from the filer and match with collection data
err = s.WithFilerClient(func(client filer_pb.SeaweedFilerClient) error {
// List buckets by looking at the buckets directory
stream, err := client.ListEntries(context.Background(), &filer_pb.ListEntriesRequest{
Directory: filerConfig.BucketsPath,
Prefix: "",
StartFromFileName: "",
InclusiveStartFrom: false,
Limit: 1000,
})
if err != nil {
return err
}
// Paginate through all buckets in the buckets directory
const listPageSize = 1000
startFrom := ""
var snapshotTsNs int64
for {
resp, err := stream.Recv()
stream, err := client.ListEntries(context.Background(), &filer_pb.ListEntriesRequest{
Directory: filerConfig.BucketsPath,
Prefix: "",
StartFromFileName: startFrom,
InclusiveStartFrom: false,
Limit: listPageSize,
SnapshotTsNs: snapshotTsNs,
})
if err != nil {
if err.Error() == "EOF" {
break
}
return err
}
if resp.Entry != nil && resp.Entry.IsDirectory {
pageCount := 0
lastName := ""
for {
resp, err := stream.Recv()
if err != nil {
if errors.Is(err, io.EOF) {
break
}
return err
}
if snapshotTsNs == 0 && resp.SnapshotTsNs != 0 {
snapshotTsNs = resp.SnapshotTsNs
}
if resp.Entry == nil {
continue
}
lastName = resp.Entry.Name
pageCount++
if !resp.Entry.IsDirectory {
continue
}
bucketName := resp.Entry.Name
if strings.HasPrefix(bucketName, ".") {
// Skip internal/system directories from Object Store bucket listing.
@@ -502,6 +620,12 @@ func (s *AdminServer) GetS3Buckets() ([]S3Bucket, error) {
}
buckets = append(buckets, bucket)
}
// If we received fewer entries than the page size, we've listed everything
if pageCount < listPageSize {
break
}
startFrom = lastName
}
return nil

View File

@@ -26,6 +26,15 @@ type S3BucketsData struct {
TotalBuckets int `json:"total_buckets"`
TotalSize int64 `json:"total_size"`
LastUpdated time.Time `json:"last_updated"`
// Pagination
CurrentPage int `json:"current_page"`
TotalPages int `json:"total_pages"`
PageSize int `json:"page_size"`
// Sorting
SortBy string `json:"sort_by"`
SortOrder string `json:"sort_order"`
}
type CreateBucketRequest struct {
@@ -48,7 +57,7 @@ type CreateBucketRequest struct {
func (s *AdminServer) ShowS3Buckets(w http.ResponseWriter, r *http.Request) {
username := UsernameFromContext(r.Context())
data, err := s.GetS3BucketsData()
data, err := s.GetS3BucketsData(1, 100, "name", "asc")
if err != nil {
writeJSONError(w, http.StatusInternalServerError, "Failed to get Object Store buckets: "+err.Error())
return

View File

@@ -3,6 +3,7 @@ package handlers
import (
"net/http"
"net/url"
"strconv"
"time"
"github.com/gorilla/mux"
@@ -295,8 +296,26 @@ func (h *AdminHandlers) ShowDashboard(w http.ResponseWriter, r *http.Request) {
// ShowS3Buckets renders the Object Store buckets management page
func (h *AdminHandlers) ShowS3Buckets(w http.ResponseWriter, r *http.Request) {
// Get Object Store buckets data from the server
s3Data := h.getS3BucketsData(r)
// Get pagination and sorting parameters from query string
page := 1
if p := r.URL.Query().Get("page"); p != "" {
if parsed, err := strconv.Atoi(p); err == nil && parsed > 0 {
page = parsed
}
}
pageSize := 100
if ps := r.URL.Query().Get("pageSize"); ps != "" {
if parsed, err := strconv.Atoi(ps); err == nil && parsed > 0 && parsed <= 1000 {
pageSize = parsed
}
}
sortBy := defaultQuery(r.URL.Query().Get("sortBy"), "name")
sortOrder := defaultQuery(r.URL.Query().Get("sortOrder"), "asc")
// Get Object Store buckets data with pagination
s3Data := h.getS3BucketsData(r, page, pageSize, sortBy, sortOrder)
username := h.getUsername(r)
// Render HTML template
@@ -463,15 +482,15 @@ func (h *AdminHandlers) ShowBucketDetails(w http.ResponseWriter, r *http.Request
writeJSON(w, http.StatusOK, details)
}
// getS3BucketsData retrieves Object Store buckets data from the server
func (h *AdminHandlers) getS3BucketsData(r *http.Request) dash.S3BucketsData {
// getS3BucketsData retrieves Object Store buckets data from the server with pagination
func (h *AdminHandlers) getS3BucketsData(r *http.Request, page, pageSize int, sortBy, sortOrder string) dash.S3BucketsData {
username := dash.UsernameFromContext(r.Context())
if username == "" {
username = "admin"
}
// Get Object Store buckets data
data, err := h.adminServer.GetS3BucketsData()
data, err := h.adminServer.GetS3BucketsData(page, pageSize, sortBy, sortOrder)
if err != nil {
// Return empty data on error
return dash.S3BucketsData{
@@ -480,6 +499,11 @@ func (h *AdminHandlers) getS3BucketsData(r *http.Request) dash.S3BucketsData {
TotalBuckets: 0,
TotalSize: 0,
LastUpdated: time.Now(),
CurrentPage: 1,
TotalPages: 1,
PageSize: pageSize,
SortBy: sortBy,
SortOrder: sortOrder,
}
}

View File

@@ -12,12 +12,17 @@ templ S3Buckets(data dash.S3BucketsData) {
</h1>
<div class="btn-toolbar mb-2 mb-md-0">
<div class="btn-group me-2">
<button type="button" class="btn btn-sm btn-primary"
data-bs-toggle="modal"
<select class="form-select form-select-sm me-2" id="pageSizeSelect" onchange="changePageSize()" style="width: auto;">
<option value="50" if data.PageSize == 50 { selected="selected" }>50 per page</option>
<option value="100" if data.PageSize == 100 { selected="selected" }>100 per page</option>
<option value="200" if data.PageSize == 200 { selected="selected" }>200 per page</option>
<option value="500" if data.PageSize == 500 { selected="selected" }>500 per page</option>
</select>
<button type="button" class="btn btn-sm btn-primary"
data-bs-toggle="modal"
data-bs-target="#createBucketModal">
<i class="fas fa-plus me-1"></i>Create Bucket
</button>
</div>
</div>
</div>
@@ -112,12 +117,42 @@ templ S3Buckets(data dash.S3BucketsData) {
<table class="table table-hover" width="100%" cellspacing="0" id="bucketsTable">
<thead>
<tr>
<th>Name</th>
<th>Owner</th>
<th>Created</th>
<th>Objects</th>
<th>Logical Size</th>
<th>Physical Size</th>
<th>
<a href="#" onclick="sortTable('name')" class="text-decoration-none text-dark">
Name
@getSortIcon("name", data.SortBy, data.SortOrder)
</a>
</th>
<th>
<a href="#" onclick="sortTable('owner')" class="text-decoration-none text-dark">
Owner
@getSortIcon("owner", data.SortBy, data.SortOrder)
</a>
</th>
<th>
<a href="#" onclick="sortTable('created')" class="text-decoration-none text-dark">
Created
@getSortIcon("created", data.SortBy, data.SortOrder)
</a>
</th>
<th>
<a href="#" onclick="sortTable('objects')" class="text-decoration-none text-dark">
Objects
@getSortIcon("objects", data.SortBy, data.SortOrder)
</a>
</th>
<th>
<a href="#" onclick="sortTable('logical_size')" class="text-decoration-none text-dark">
Logical Size
@getSortIcon("logical_size", data.SortBy, data.SortOrder)
</a>
</th>
<th>
<a href="#" onclick="sortTable('physical_size')" class="text-decoration-none text-dark">
Physical Size
@getSortIcon("physical_size", data.SortBy, data.SortOrder)
</a>
</th>
<th>Quota</th>
<th>Versioning</th>
<th>Object Lock</th>
@@ -256,6 +291,61 @@ templ S3Buckets(data dash.S3BucketsData) {
</tbody>
</table>
</div>
<!-- Pagination Controls -->
if data.TotalPages > 1 {
<div class="d-flex justify-content-between align-items-center mt-3">
<small class="text-muted">
Showing { fmt.Sprintf("%d", (data.CurrentPage-1)*data.PageSize+1) } to { fmt.Sprintf("%d", minInt(data.CurrentPage*data.PageSize, data.TotalBuckets)) } of { fmt.Sprintf("%d", data.TotalBuckets) } buckets
</small>
<nav aria-label="Buckets pagination">
<ul class="pagination pagination-sm mb-0">
<!-- Previous Button -->
if data.CurrentPage > 1 {
<li class="page-item">
<a class="page-link pagination-link" href="#" data-page={fmt.Sprintf("%d", data.CurrentPage-1)}>
<i class="fas fa-chevron-left"></i>
</a>
</li>
} else {
<li class="page-item disabled">
<span class="page-link">
<i class="fas fa-chevron-left"></i>
</span>
</li>
}
<!-- Page Numbers -->
for i := maxInt(1, data.CurrentPage-2); i <= minInt(data.TotalPages, data.CurrentPage+2); i++ {
if i == data.CurrentPage {
<li class="page-item active">
<span class="page-link">{fmt.Sprintf("%d", i)}</span>
</li>
} else {
<li class="page-item">
<a class="page-link pagination-link" href="#" data-page={fmt.Sprintf("%d", i)}>{fmt.Sprintf("%d", i)}</a>
</li>
}
}
<!-- Next Button -->
if data.CurrentPage < data.TotalPages {
<li class="page-item">
<a class="page-link pagination-link" href="#" data-page={fmt.Sprintf("%d", data.CurrentPage+1)}>
<i class="fas fa-chevron-right"></i>
</a>
</li>
} else {
<li class="page-item disabled">
<span class="page-link">
<i class="fas fa-chevron-right"></i>
</span>
</li>
}
</ul>
</nav>
</div>
}
</div>
</div>
</div>
@@ -574,6 +664,15 @@ templ S3Buckets(data dash.S3BucketsData) {
let cachedUsers = null;
document.addEventListener('DOMContentLoaded', function() {
// Add click handlers to pagination links
document.querySelectorAll('.pagination-link').forEach(link => {
link.addEventListener('click', function(e) {
e.preventDefault();
const page = this.getAttribute('data-page');
goToPage(page);
});
});
// Initialize modal instances once (reuse with show/hide)
deleteModalInstance = new bootstrap.Modal(document.getElementById('deleteBucketModal'));
quotaModalInstance = new bootstrap.Modal(document.getElementById('manageQuotaModal'));
@@ -1032,57 +1131,90 @@ function displayBucketDetails(data) {
document.getElementById('bucketDetailsContent').innerHTML = rows.join('');
}
// goToPage reloads the buckets view at the requested page number while
// preserving every other query parameter (pageSize, sortBy, sortOrder).
function goToPage(page) {
    const target = new URL(window.location);
    target.searchParams.set('page', page);
    window.location.href = target.toString();
}
// changePageSize applies the value chosen in the page-size selector and
// jumps back to page 1, since the old page index is meaningless once the
// page size changes.
function changePageSize() {
    const selectedSize = document.getElementById('pageSizeSelect').value;
    const target = new URL(window.location);
    target.searchParams.set('pageSize', selectedSize);
    target.searchParams.set('page', '1');
    window.location.href = target.toString();
}
// sortTable navigates to the current view re-sorted by `column`.
// Clicking the column that is already sorted ascending toggles it to
// descending; any other click sorts ascending. Re-sorting always resets
// to page 1 so the result set starts from the top.
function sortTable(column) {
    const target = new URL(window.location);
    const activeColumn = target.searchParams.get('sortBy');
    const activeOrder = target.searchParams.get('sortOrder') || 'asc';
    const toggleToDesc = activeColumn === column && activeOrder === 'asc';
    target.searchParams.set('sortBy', column);
    target.searchParams.set('sortOrder', toggleToDesc ? 'desc' : 'asc');
    target.searchParams.set('page', '1');
    window.location.href = target.toString();
}
function exportBucketList() {
// RFC 4180 compliant CSV escaping: escape double quotes by doubling them
function escapeCsvField(value) {
const str = String(value ?? '');
// If the field contains comma, double quote, or newline, wrap in quotes and escape internal quotes
if (str.includes(',') || str.includes('"') || str.includes('\n') || str.includes('\r')) {
return '"' + str.replace(/"/g, '""') + '"';
}
return '"' + str + '"';
}
const buckets = Array.from(document.querySelectorAll('#bucketsTable tbody tr')).map(row => {
const cells = row.querySelectorAll('td');
if (cells.length > 1) {
return {
name: cells[0].textContent.trim(),
owner: cells[1].textContent.trim(),
created: cells[2].textContent.trim(),
objects: cells[3].textContent.trim(),
function formatBytes(bytes) {
if (bytes === 0) return '0 Bytes';
const k = 1024;
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
logicalSize: cells[4].textContent.trim(),
physicalSize: cells[5].textContent.trim(),
quota: cells[6].textContent.trim(),
versioning: cells[7].textContent.trim(),
objectLock: cells[8].textContent.trim()
};
}
return null;
}).filter(bucket => bucket !== null);
// Fetch all buckets from the API (not just the current page)
fetch('/api/s3/buckets')
.then(response => response.json())
.then(data => {
if (data.error) {
alert('Error exporting buckets: ' + data.error);
return;
}
const csvContent = "data:text/csv;charset=utf-8," +
"Name,Owner,Logical Size,Physical Size,Object Count,Created,Quota,Versioning,Object Lock\n" +
buckets.map(b => [
escapeCsvField(b.name),
escapeCsvField(b.owner),
escapeCsvField(b.logicalSize),
escapeCsvField(b.physicalSize),
escapeCsvField(b.objects),
escapeCsvField(b.created),
escapeCsvField(b.quota),
escapeCsvField(b.versioning),
escapeCsvField(b.objectLock)
].join(',')).join("\n");
const buckets = data.buckets || [];
const csvContent = "data:text/csv;charset=utf-8," +
"Name,Owner,Logical Size,Physical Size,Object Count,Created,Quota,Versioning,Object Lock\n" +
buckets.map(b => [
escapeCsvField(b.name),
escapeCsvField(b.owner),
escapeCsvField(formatBytes(b.logical_size)),
escapeCsvField(formatBytes(b.physical_size)),
escapeCsvField(b.object_count),
escapeCsvField(b.created_at),
escapeCsvField(b.quota_enabled ? formatBytes(b.quota) : 'No quota'),
escapeCsvField(b.versioning_status || 'Not configured'),
escapeCsvField(b.object_lock_enabled ? 'Enabled' : 'Not configured')
].join(',')).join("\n");
const encodedUri = encodeURI(csvContent);
const link = document.createElement("a");
link.setAttribute("href", encodedUri);
link.setAttribute("download", "buckets.csv");
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
const encodedUri = encodeURI(csvContent);
const link = document.createElement("a");
link.setAttribute("href", encodedUri);
link.setAttribute("download", "buckets.csv");
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
})
.catch(error => {
console.error('Error:', error);
alert('Error exporting buckets: ' + error.message);
});
}
</script>
}

File diff suppressed because one or more lines are too long