Keep two pointers i = 0 and j = 1, plus a matched-length counter k = 0, and compare the rotations starting at i and j character by character (indices taken modulo the length):
a[i+k] == a[j+k]: k++;
a[i+k] >  a[j+k]: i = i+k+1;
a[i+k] <  a[j+k]: j = j+k+1;
After a mismatch reset k = 0, and if i and j collide, push j one step further. The loop stops when a pointer runs off the string or k reaches the length; min(i, j) is then the start of the lexicographically smallest rotation (swap the two mismatch branches to get the largest one).
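A small trace makes the pointer moves concrete; take the string "bba" with the minimum-representation rules above. First a[0] == a[1], so k becomes 1; then a[1] = 'b' > a[2] = 'a', so i jumps to i+k+1 = 2 and k resets to 0. Next a[2] = 'a' < a[1] = 'b', so j jumps to 2, collides with i, and is pushed to 3, which ends the loop. min(i, j) = 2, and indeed "abb", the rotation starting at index 2, is the smallest rotation of "bba".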
#include <cstdio>
#include <algorithm>
#include <iostream>
#include <cstring>
using namespace std;
char a[1000005];             // input string
int nexta[1000005], la;      // KMP failure table and string length
// flag == true: minimum representation; flag == false: maximum representation.
// Returns the starting index of the lexicographically smallest/largest rotation of a.
int minOrMaxExpress(int flag)
{
    int i = 0, j = 1, k = 0;
    while (i < la && j < la && k < la)
    {
        int pre = a[(i + k) % la] - a[(j + k) % la];
        if (!pre) k++;                        // characters match, extend the common prefix
        else
        {
            if (flag)
            {
                if (pre > 0) i = i + k + 1;   // rotation at i is larger, discard it
                else j = j + k + 1;           // rotation at j is larger, discard it
            }
            else
            {
                if (pre > 0) j = j + k + 1;   // maximum case: discard the smaller rotation
                else i = i + k + 1;
            }
            if (i == j) j++;                  // keep the two candidates distinct
            k = 0;
        }
    }
    return min(i, j);
}
// Standard KMP failure function: nexta[i] is the length of the longest proper
// prefix of a[0..i) that is also a suffix of it.
void getNext()
{
    int i = 0, j = -1;
    nexta[0] = -1;
    while (i < la)
    {
        if (j == -1 || a[i] == a[j])   // test j == -1 first to avoid reading a[-1]
        {
            i++;
            j++;
            nexta[i] = j;
        }
        else j = nexta[j];
    }
}
int main()
{
    while (scanf("%s", a) != EOF)
    {
        la = strlen(a);
        int ansMin = minOrMaxExpress(true) + 1;    // 1-indexed start of the smallest rotation
        int ansMax = minOrMaxExpress(false) + 1;   // 1-indexed start of the largest rotation
        getNext();
        // la - nexta[la] is the smallest period of the string; if it divides la,
        // the smallest (and largest) rotation occurs once per period, i.e.
        // la / (la - nexta[la]) times, otherwise it occurs exactly once.
        int ans = la % (la - nexta[la]) ? 1 : la / (la - nexta[la]);
        printf("%d %d %d %d\n", ansMin, ans, ansMax, ans);
    }
    return 0;
}
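As a quick sanity check (the file name and compile command below are only an illustration), the program reads one string per line and prints four numbers: the 1-indexed start of the smallest rotation, how many times it occurs among all rotations, then the same pair for the largest rotation.

g++ -O2 rotate.cpp -o rotate
echo ababab | ./rotate
1 3 2 3

Here "ababab" has smallest rotation "ababab" starting at position 1 and largest rotation "bababa" starting at position 2, and each occurs 3 times because the smallest period has length 2.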